ngram
listlengths
0
82k
[ "+= 1 return n if __name__== '__main__': solution = Solution()", "while curr < n: if nums[curr] == val: nums[curr] =", "curr < n: if nums[curr] == val: nums[curr] = nums[n-1]", "Solution() nums = [3,2,2,3] val = 3 ans = solution.removeElement(nums,", "__name__== '__main__': solution = Solution() nums = [3,2,2,3] val =", "= nums[n-1] n -= 1 else: curr += 1 return", "int) -> int: if not nums: return 0 curr =", "-= 1 else: curr += 1 return n if __name__==", "Solution: def removeElement(self, nums: List[int], val: int) -> int: if", "nums: List[int], val: int) -> int: if not nums: return", "import List class Solution: def removeElement(self, nums: List[int], val: int)", "0 n = len(nums) while curr < n: if nums[curr]", "from typing import List class Solution: def removeElement(self, nums: List[int],", "1 return n if __name__== '__main__': solution = Solution() nums", "'__main__': solution = Solution() nums = [3,2,2,3] val = 3", "== val: nums[curr] = nums[n-1] n -= 1 else: curr", "< n: if nums[curr] == val: nums[curr] = nums[n-1] n", "curr += 1 return n if __name__== '__main__': solution =", "removeElement(self, nums: List[int], val: int) -> int: if not nums:", "val = 3 ans = solution.removeElement(nums, val) # print(ans) print(nums[:ans])", "int: if not nums: return 0 curr = 0 n", "-> int: if not nums: return 0 curr = 0", "nums[curr] = nums[n-1] n -= 1 else: curr += 1", "n = len(nums) while curr < n: if nums[curr] ==", "else: curr += 1 return n if __name__== '__main__': solution", "if nums[curr] == val: nums[curr] = nums[n-1] n -= 1", "nums[n-1] n -= 1 else: curr += 1 return n", "not nums: return 0 curr = 0 n = len(nums)", "solution = Solution() nums = [3,2,2,3] val = 3 ans", "curr = 0 n = len(nums) while curr < n:", "n -= 1 else: curr += 1 return n if", "n if __name__== '__main__': solution = Solution() nums = [3,2,2,3]", "nums: return 0 curr = 0 n = len(nums) while", "def removeElement(self, nums: List[int], val: int) -> int: if not", "= Solution() nums = [3,2,2,3] 
val = 3 ans =", "nums = [3,2,2,3] val = 3 ans = solution.removeElement(nums, val)", "[3,2,2,3] val = 3 ans = solution.removeElement(nums, val) # print(ans)", "1 else: curr += 1 return n if __name__== '__main__':", "n: if nums[curr] == val: nums[curr] = nums[n-1] n -=", "val: int) -> int: if not nums: return 0 curr", "val: nums[curr] = nums[n-1] n -= 1 else: curr +=", "return 0 curr = 0 n = len(nums) while curr", "= len(nums) while curr < n: if nums[curr] == val:", "0 curr = 0 n = len(nums) while curr <", "if __name__== '__main__': solution = Solution() nums = [3,2,2,3] val", "return n if __name__== '__main__': solution = Solution() nums =", "List class Solution: def removeElement(self, nums: List[int], val: int) ->", "<gh_stars>0 from typing import List class Solution: def removeElement(self, nums:", "typing import List class Solution: def removeElement(self, nums: List[int], val:", "class Solution: def removeElement(self, nums: List[int], val: int) -> int:", "len(nums) while curr < n: if nums[curr] == val: nums[curr]", "= [3,2,2,3] val = 3 ans = solution.removeElement(nums, val) #", "List[int], val: int) -> int: if not nums: return 0", "if not nums: return 0 curr = 0 n =", "= 0 n = len(nums) while curr < n: if", "nums[curr] == val: nums[curr] = nums[n-1] n -= 1 else:" ]
[ "platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory from platformio.compat import aio_get_running_loop from platformio.exception import", "get_core_package_dir(\"contrib-piohome\") if not os.path.isdir(contrib_dir): raise PlatformioException(\"Invalid path to PIO Home", "2.0 (the \"License\"); # you may not use this file", "ShutdownMiddleware: def __init__(self, app): self.app = app async def __call__(self,", "if not os.path.isdir(contrib_dir): raise PlatformioException(\"Invalid path to PIO Home Contrib\")", "urlparse(home_url).path routes = [ WebSocketRoute(path + \"wsrpc\", ws_rpc_factory, name=\"wsrpc\"), Route(path", "AccountRPC from platformio.commands.home.rpc.handlers.app import AppRPC from platformio.commands.home.rpc.handlers.ide import IDERPC from", "from platformio.commands.home.rpc.handlers.os import OSRPC from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC from platformio.commands.home.rpc.handlers.project", "not os.path.isdir(contrib_dir): raise PlatformioException(\"Invalid path to PIO Home Contrib\") ws_rpc_factory", "routes=routes, on_startup=[ lambda: click.echo( \"PIO Home has been started. 
Press", "import PlainTextResponse from starlette.routing import Mount, Route, WebSocketRoute from starlette.staticfiles", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "import OSRPC from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC from platformio.commands.home.rpc.handlers.project import ProjectRPC", "from platformio.package.manager.core import get_core_package_dir from platformio.proc import force_exit class ShutdownMiddleware:", "\"wsrpc\", ws_rpc_factory, name=\"wsrpc\"), Route(path + \"__shutdown__\", shutdown_server, methods=[\"POST\"]), Mount(path, StaticFiles(directory=contrib_dir,", "routes.append(Route(\"/\", protected_page)) uvicorn.run( Starlette( middleware=[Middleware(ShutdownMiddleware)], routes=routes, on_startup=[ lambda: click.echo( \"PIO", ") def run_server(host, port, no_open, shutdown_timeout, home_url): contrib_dir = get_core_package_dir(\"contrib-piohome\")", "PlainTextResponse( \"Protected PlatformIO Home session\", status_code=HTTP_403_FORBIDDEN ) def run_server(host, port,", "aio_get_running_loop().call_later(0.5, force_exit) return PlainTextResponse(\"Server has been shutdown!\") async def protected_page(_):", "License. import os from urllib.parse import urlparse import click import", "starlette.staticfiles import StaticFiles from starlette.status import HTTP_403_FORBIDDEN from platformio.commands.home.rpc.handlers.account import", "use this file except in compliance with the License. #", "from platformio.commands.home.rpc.handlers.account import AccountRPC from platformio.commands.home.rpc.handlers.app import AppRPC from platformio.commands.home.rpc.handlers.ide", "and # limitations under the License. 
import os from urllib.parse", "PlatformioException(\"Invalid path to PIO Home Contrib\") ws_rpc_factory = WebSocketJSONRPCServerFactory(shutdown_timeout) ws_rpc_factory.addObjectHandler(AccountRPC(),", "get_core_package_dir from platformio.proc import force_exit class ShutdownMiddleware: def __init__(self, app):", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "\"/\": routes.append(Route(\"/\", protected_page)) uvicorn.run( Starlette( middleware=[Middleware(ShutdownMiddleware)], routes=routes, on_startup=[ lambda: click.echo(", "License. # You may obtain a copy of the License", "from starlette.middleware import Middleware from starlette.responses import PlainTextResponse from starlette.routing", "shutdown_server(_=None): aio_get_running_loop().call_later(0.5, force_exit) return PlainTextResponse(\"Server has been shutdown!\") async def", "under the License is distributed on an \"AS IS\" BASIS,", "from starlette.routing import Mount, Route, WebSocketRoute from starlette.staticfiles import StaticFiles", "License for the specific language governing permissions and # limitations", "path = urlparse(home_url).path routes = [ WebSocketRoute(path + \"wsrpc\", ws_rpc_factory,", "import PIOCoreRPC from platformio.commands.home.rpc.handlers.project import ProjectRPC from platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory", "starlette.responses import PlainTextResponse from starlette.routing import Mount, Route, WebSocketRoute from", "ws_rpc_factory.addObjectHandler(MiscRPC(), namespace=\"misc\") ws_rpc_factory.addObjectHandler(OSRPC(), namespace=\"os\") ws_rpc_factory.addObjectHandler(PIOCoreRPC(), namespace=\"core\") ws_rpc_factory.addObjectHandler(ProjectRPC(), namespace=\"project\") path =", "path to PIO Home Contrib\") ws_rpc_factory = WebSocketJSONRPCServerFactory(shutdown_timeout) ws_rpc_factory.addObjectHandler(AccountRPC(), namespace=\"account\")", "(c) 2014-present PlatformIO <<EMAIL>> # # Licensed under the 
Apache", "send): if scope[\"type\"] == \"http\" and b\"__shutdown__\" in scope.get(\"query_string\", {}):", "WebSocketJSONRPCServerFactory from platformio.compat import aio_get_running_loop from platformio.exception import PlatformioException from", "starlette.routing import Mount, Route, WebSocketRoute from starlette.staticfiles import StaticFiles from", "Home session\", status_code=HTTP_403_FORBIDDEN ) def run_server(host, port, no_open, shutdown_timeout, home_url):", "import Mount, Route, WebSocketRoute from starlette.staticfiles import StaticFiles from starlette.status", "StaticFiles(directory=contrib_dir, html=True), name=\"static\"), ] if path != \"/\": routes.append(Route(\"/\", protected_page))", "in compliance with the License. # You may obtain a", "app): self.app = app async def __call__(self, scope, receive, send):", "namespace=\"misc\") ws_rpc_factory.addObjectHandler(OSRPC(), namespace=\"os\") ws_rpc_factory.addObjectHandler(PIOCoreRPC(), namespace=\"core\") ws_rpc_factory.addObjectHandler(ProjectRPC(), namespace=\"project\") path = urlparse(home_url).path", "limitations under the License. 
import os from urllib.parse import urlparse", "software # distributed under the License is distributed on an", "namespace=\"core\") ws_rpc_factory.addObjectHandler(ProjectRPC(), namespace=\"project\") path = urlparse(home_url).path routes = [ WebSocketRoute(path", "WebSocketJSONRPCServerFactory(shutdown_timeout) ws_rpc_factory.addObjectHandler(AccountRPC(), namespace=\"account\") ws_rpc_factory.addObjectHandler(AppRPC(), namespace=\"app\") ws_rpc_factory.addObjectHandler(IDERPC(), namespace=\"ide\") ws_rpc_factory.addObjectHandler(MiscRPC(), namespace=\"misc\") ws_rpc_factory.addObjectHandler(OSRPC(),", "import StaticFiles from starlette.status import HTTP_403_FORBIDDEN from platformio.commands.home.rpc.handlers.account import AccountRPC", "home_url): contrib_dir = get_core_package_dir(\"contrib-piohome\") if not os.path.isdir(contrib_dir): raise PlatformioException(\"Invalid path", "PlatformIO <<EMAIL>> # # Licensed under the Apache License, Version", "has been shutdown!\") async def protected_page(_): return PlainTextResponse( \"Protected PlatformIO", "Middleware from starlette.responses import PlainTextResponse from starlette.routing import Mount, Route,", "import click import uvicorn from starlette.applications import Starlette from starlette.middleware", "aio_get_running_loop from platformio.exception import PlatformioException from platformio.package.manager.core import get_core_package_dir from", "!= \"/\": routes.append(Route(\"/\", protected_page)) uvicorn.run( Starlette( middleware=[Middleware(ShutdownMiddleware)], routes=routes, on_startup=[ lambda:", "platformio.proc import force_exit class ShutdownMiddleware: def __init__(self, app): self.app =", "urlparse import click import uvicorn from starlette.applications import Starlette from", "to PIO Home Contrib\") ws_rpc_factory = WebSocketJSONRPCServerFactory(shutdown_timeout) ws_rpc_factory.addObjectHandler(AccountRPC(), namespace=\"account\") ws_rpc_factory.addObjectHandler(AppRPC(),", "path != \"/\": 
routes.append(Route(\"/\", protected_page)) uvicorn.run( Starlette( middleware=[Middleware(ShutdownMiddleware)], routes=routes, on_startup=[", "Press Ctrl+C to shutdown.\" ), lambda: None if no_open else", "os from urllib.parse import urlparse import click import uvicorn from", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "ws_rpc_factory.addObjectHandler(PIOCoreRPC(), namespace=\"core\") ws_rpc_factory.addObjectHandler(ProjectRPC(), namespace=\"project\") path = urlparse(home_url).path routes = [", "to in writing, software # distributed under the License is", "protected_page(_): return PlainTextResponse( \"Protected PlatformIO Home session\", status_code=HTTP_403_FORBIDDEN ) def", "# See the License for the specific language governing permissions", "def __init__(self, app): self.app = app async def __call__(self, scope,", "WebSocketRoute from starlette.staticfiles import StaticFiles from starlette.status import HTTP_403_FORBIDDEN from", "or agreed to in writing, software # distributed under the", "import uvicorn from starlette.applications import Starlette from starlette.middleware import Middleware", "<<EMAIL>> # # Licensed under the Apache License, Version 2.0", "required by applicable law or agreed to in writing, software", "in scope.get(\"query_string\", {}): await shutdown_server() await self.app(scope, receive, send) async", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "with the License. 
# You may obtain a copy of", "Starlette( middleware=[Middleware(ShutdownMiddleware)], routes=routes, on_startup=[ lambda: click.echo( \"PIO Home has been", "compliance with the License. # You may obtain a copy", "PlainTextResponse from starlette.routing import Mount, Route, WebSocketRoute from starlette.staticfiles import", "agreed to in writing, software # distributed under the License", "platformio.commands.home.rpc.handlers.project import ProjectRPC from platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory from platformio.compat import", "distributed under the License is distributed on an \"AS IS\"", "express or implied. # See the License for the specific", "2014-present PlatformIO <<EMAIL>> # # Licensed under the Apache License,", "except in compliance with the License. # You may obtain", "self.app = app async def __call__(self, scope, receive, send): if", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "import MiscRPC from platformio.commands.home.rpc.handlers.os import OSRPC from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC", "not use this file except in compliance with the License.", "import aio_get_running_loop from platformio.exception import PlatformioException from platformio.package.manager.core import get_core_package_dir", "= urlparse(home_url).path routes = [ WebSocketRoute(path + \"wsrpc\", ws_rpc_factory, name=\"wsrpc\"),", "port, no_open, shutdown_timeout, home_url): contrib_dir = get_core_package_dir(\"contrib-piohome\") if not os.path.isdir(contrib_dir):", "writing, software # distributed under the License is distributed on", "you may not use this file except in compliance with", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "lambda: None if no_open else click.launch(home_url), ], ), host=host, port=port,", "on_startup=[ lambda: click.echo( \"PIO Home has been started. 
Press Ctrl+C", "if path != \"/\": routes.append(Route(\"/\", protected_page)) uvicorn.run( Starlette( middleware=[Middleware(ShutdownMiddleware)], routes=routes,", "MiscRPC from platformio.commands.home.rpc.handlers.os import OSRPC from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC from", "permissions and # limitations under the License. import os from", "# limitations under the License. import os from urllib.parse import", "CONDITIONS OF ANY KIND, either express or implied. # See", "import WebSocketJSONRPCServerFactory from platformio.compat import aio_get_running_loop from platformio.exception import PlatformioException", "def run_server(host, port, no_open, shutdown_timeout, home_url): contrib_dir = get_core_package_dir(\"contrib-piohome\") if", "methods=[\"POST\"]), Mount(path, StaticFiles(directory=contrib_dir, html=True), name=\"static\"), ] if path != \"/\":", "await shutdown_server() await self.app(scope, receive, send) async def shutdown_server(_=None): aio_get_running_loop().call_later(0.5,", "been started. 
Press Ctrl+C to shutdown.\" ), lambda: None if", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "{}): await shutdown_server() await self.app(scope, receive, send) async def shutdown_server(_=None):", "starlette.status import HTTP_403_FORBIDDEN from platformio.commands.home.rpc.handlers.account import AccountRPC from platformio.commands.home.rpc.handlers.app import", "urllib.parse import urlparse import click import uvicorn from starlette.applications import", "def shutdown_server(_=None): aio_get_running_loop().call_later(0.5, force_exit) return PlainTextResponse(\"Server has been shutdown!\") async", "import force_exit class ShutdownMiddleware: def __init__(self, app): self.app = app", "from starlette.status import HTTP_403_FORBIDDEN from platformio.commands.home.rpc.handlers.account import AccountRPC from platformio.commands.home.rpc.handlers.app", "+ \"__shutdown__\", shutdown_server, methods=[\"POST\"]), Mount(path, StaticFiles(directory=contrib_dir, html=True), name=\"static\"), ] if", "Mount(path, StaticFiles(directory=contrib_dir, html=True), name=\"static\"), ] if path != \"/\": routes.append(Route(\"/\",", "__call__(self, scope, receive, send): if scope[\"type\"] == \"http\" and b\"__shutdown__\"", "+ \"wsrpc\", ws_rpc_factory, name=\"wsrpc\"), Route(path + \"__shutdown__\", shutdown_server, methods=[\"POST\"]), Mount(path,", "\"__shutdown__\", shutdown_server, methods=[\"POST\"]), Mount(path, StaticFiles(directory=contrib_dir, html=True), name=\"static\"), ] if path", "platformio.exception import PlatformioException from platformio.package.manager.core import get_core_package_dir from platformio.proc import", "from starlette.staticfiles import StaticFiles from starlette.status import HTTP_403_FORBIDDEN from platformio.commands.home.rpc.handlers.account", "OR CONDITIONS OF ANY KIND, either express or implied. #", "governing permissions and # limitations under the License. 
import os", "platformio.commands.home.rpc.handlers.app import AppRPC from platformio.commands.home.rpc.handlers.ide import IDERPC from platformio.commands.home.rpc.handlers.misc import", "force_exit) return PlainTextResponse(\"Server has been shutdown!\") async def protected_page(_): return", "Ctrl+C to shutdown.\" ), lambda: None if no_open else click.launch(home_url),", "the License is distributed on an \"AS IS\" BASIS, #", "await self.app(scope, receive, send) async def shutdown_server(_=None): aio_get_running_loop().call_later(0.5, force_exit) return", "return PlainTextResponse(\"Server has been shutdown!\") async def protected_page(_): return PlainTextResponse(", "async def shutdown_server(_=None): aio_get_running_loop().call_later(0.5, force_exit) return PlainTextResponse(\"Server has been shutdown!\")", "HTTP_403_FORBIDDEN from platformio.commands.home.rpc.handlers.account import AccountRPC from platformio.commands.home.rpc.handlers.app import AppRPC from", "platformio.commands.home.rpc.handlers.misc import MiscRPC from platformio.commands.home.rpc.handlers.os import OSRPC from platformio.commands.home.rpc.handlers.piocore import", "starlette.applications import Starlette from starlette.middleware import Middleware from starlette.responses import", "Contrib\") ws_rpc_factory = WebSocketJSONRPCServerFactory(shutdown_timeout) ws_rpc_factory.addObjectHandler(AccountRPC(), namespace=\"account\") ws_rpc_factory.addObjectHandler(AppRPC(), namespace=\"app\") ws_rpc_factory.addObjectHandler(IDERPC(), namespace=\"ide\")", "shutdown_server() await self.app(scope, receive, send) async def shutdown_server(_=None): aio_get_running_loop().call_later(0.5, force_exit)", "= app async def __call__(self, scope, receive, send): if scope[\"type\"]", "law or agreed to in writing, software # distributed under", "import AppRPC from platformio.commands.home.rpc.handlers.ide import IDERPC from platformio.commands.home.rpc.handlers.misc import MiscRPC", "__init__(self, app): self.app 
= app async def __call__(self, scope, receive,", "scope.get(\"query_string\", {}): await shutdown_server() await self.app(scope, receive, send) async def", "StaticFiles from starlette.status import HTTP_403_FORBIDDEN from platformio.commands.home.rpc.handlers.account import AccountRPC from", "] if path != \"/\": routes.append(Route(\"/\", protected_page)) uvicorn.run( Starlette( middleware=[Middleware(ShutdownMiddleware)],", "import ProjectRPC from platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory from platformio.compat import aio_get_running_loop", "may obtain a copy of the License at # #", "Home has been started. Press Ctrl+C to shutdown.\" ), lambda:", "from platformio.commands.home.rpc.handlers.app import AppRPC from platformio.commands.home.rpc.handlers.ide import IDERPC from platformio.commands.home.rpc.handlers.misc", "no_open, shutdown_timeout, home_url): contrib_dir = get_core_package_dir(\"contrib-piohome\") if not os.path.isdir(contrib_dir): raise", "platformio.commands.home.rpc.handlers.account import AccountRPC from platformio.commands.home.rpc.handlers.app import AppRPC from platformio.commands.home.rpc.handlers.ide import", "html=True), name=\"static\"), ] if path != \"/\": routes.append(Route(\"/\", protected_page)) uvicorn.run(", "== \"http\" and b\"__shutdown__\" in scope.get(\"query_string\", {}): await shutdown_server() await", "and b\"__shutdown__\" in scope.get(\"query_string\", {}): await shutdown_server() await self.app(scope, receive,", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "may not use this file except in compliance with the", "from platformio.exception import PlatformioException from platformio.package.manager.core import get_core_package_dir from platformio.proc", "started. 
Press Ctrl+C to shutdown.\" ), lambda: None if no_open", "namespace=\"ide\") ws_rpc_factory.addObjectHandler(MiscRPC(), namespace=\"misc\") ws_rpc_factory.addObjectHandler(OSRPC(), namespace=\"os\") ws_rpc_factory.addObjectHandler(PIOCoreRPC(), namespace=\"core\") ws_rpc_factory.addObjectHandler(ProjectRPC(), namespace=\"project\") path", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "ws_rpc_factory.addObjectHandler(AccountRPC(), namespace=\"account\") ws_rpc_factory.addObjectHandler(AppRPC(), namespace=\"app\") ws_rpc_factory.addObjectHandler(IDERPC(), namespace=\"ide\") ws_rpc_factory.addObjectHandler(MiscRPC(), namespace=\"misc\") ws_rpc_factory.addObjectHandler(OSRPC(), namespace=\"os\")", "this file except in compliance with the License. # You", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "from urllib.parse import urlparse import click import uvicorn from starlette.applications", "# # Licensed under the Apache License, Version 2.0 (the", "file except in compliance with the License. 
# You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "[ WebSocketRoute(path + \"wsrpc\", ws_rpc_factory, name=\"wsrpc\"), Route(path + \"__shutdown__\", shutdown_server,", "PlainTextResponse(\"Server has been shutdown!\") async def protected_page(_): return PlainTextResponse( \"Protected", "protected_page)) uvicorn.run( Starlette( middleware=[Middleware(ShutdownMiddleware)], routes=routes, on_startup=[ lambda: click.echo( \"PIO Home", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "ws_rpc_factory, name=\"wsrpc\"), Route(path + \"__shutdown__\", shutdown_server, methods=[\"POST\"]), Mount(path, StaticFiles(directory=contrib_dir, html=True),", "ws_rpc_factory = WebSocketJSONRPCServerFactory(shutdown_timeout) ws_rpc_factory.addObjectHandler(AccountRPC(), namespace=\"account\") ws_rpc_factory.addObjectHandler(AppRPC(), namespace=\"app\") ws_rpc_factory.addObjectHandler(IDERPC(), namespace=\"ide\") ws_rpc_factory.addObjectHandler(MiscRPC(),", "language governing permissions and # limitations under the License. 
import", "ws_rpc_factory.addObjectHandler(ProjectRPC(), namespace=\"project\") path = urlparse(home_url).path routes = [ WebSocketRoute(path +", "= WebSocketJSONRPCServerFactory(shutdown_timeout) ws_rpc_factory.addObjectHandler(AccountRPC(), namespace=\"account\") ws_rpc_factory.addObjectHandler(AppRPC(), namespace=\"app\") ws_rpc_factory.addObjectHandler(IDERPC(), namespace=\"ide\") ws_rpc_factory.addObjectHandler(MiscRPC(), namespace=\"misc\")", "async def protected_page(_): return PlainTextResponse( \"Protected PlatformIO Home session\", status_code=HTTP_403_FORBIDDEN", "status_code=HTTP_403_FORBIDDEN ) def run_server(host, port, no_open, shutdown_timeout, home_url): contrib_dir =", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "if no_open else click.launch(home_url), ], ), host=host, port=port, log_level=\"warning\", )", "namespace=\"project\") path = urlparse(home_url).path routes = [ WebSocketRoute(path + \"wsrpc\",", "starlette.middleware import Middleware from starlette.responses import PlainTextResponse from starlette.routing import", "or implied. # See the License for the specific language", "ws_rpc_factory.addObjectHandler(AppRPC(), namespace=\"app\") ws_rpc_factory.addObjectHandler(IDERPC(), namespace=\"ide\") ws_rpc_factory.addObjectHandler(MiscRPC(), namespace=\"misc\") ws_rpc_factory.addObjectHandler(OSRPC(), namespace=\"os\") ws_rpc_factory.addObjectHandler(PIOCoreRPC(), namespace=\"core\")", "KIND, either express or implied. 
# See the License for", "specific language governing permissions and # limitations under the License.", "shutdown.\" ), lambda: None if no_open else click.launch(home_url), ], ),", "import Middleware from starlette.responses import PlainTextResponse from starlette.routing import Mount,", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC from platformio.commands.home.rpc.handlers.project import ProjectRPC from platformio.commands.home.rpc.server", "platformio.compat import aio_get_running_loop from platformio.exception import PlatformioException from platformio.package.manager.core import", "Route(path + \"__shutdown__\", shutdown_server, methods=[\"POST\"]), Mount(path, StaticFiles(directory=contrib_dir, html=True), name=\"static\"), ]", "(the \"License\"); # you may not use this file except", "# you may not use this file except in compliance", "has been started. Press Ctrl+C to shutdown.\" ), lambda: None", "namespace=\"account\") ws_rpc_factory.addObjectHandler(AppRPC(), namespace=\"app\") ws_rpc_factory.addObjectHandler(IDERPC(), namespace=\"ide\") ws_rpc_factory.addObjectHandler(MiscRPC(), namespace=\"misc\") ws_rpc_factory.addObjectHandler(OSRPC(), namespace=\"os\") ws_rpc_factory.addObjectHandler(PIOCoreRPC(),", "shutdown_timeout, home_url): contrib_dir = get_core_package_dir(\"contrib-piohome\") if not os.path.isdir(contrib_dir): raise PlatformioException(\"Invalid", "ws_rpc_factory.addObjectHandler(OSRPC(), namespace=\"os\") ws_rpc_factory.addObjectHandler(PIOCoreRPC(), namespace=\"core\") ws_rpc_factory.addObjectHandler(ProjectRPC(), namespace=\"project\") path = urlparse(home_url).path routes", "# # Unless required by applicable law or agreed to", "platformio.commands.home.rpc.handlers.os import OSRPC from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC from platformio.commands.home.rpc.handlers.project import", "OSRPC from 
platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC from platformio.commands.home.rpc.handlers.project import ProjectRPC from", "platformio.package.manager.core import get_core_package_dir from platformio.proc import force_exit class ShutdownMiddleware: def", "raise PlatformioException(\"Invalid path to PIO Home Contrib\") ws_rpc_factory = WebSocketJSONRPCServerFactory(shutdown_timeout)", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "Mount, Route, WebSocketRoute from starlette.staticfiles import StaticFiles from starlette.status import", "Version 2.0 (the \"License\"); # you may not use this", "PIO Home Contrib\") ws_rpc_factory = WebSocketJSONRPCServerFactory(shutdown_timeout) ws_rpc_factory.addObjectHandler(AccountRPC(), namespace=\"account\") ws_rpc_factory.addObjectHandler(AppRPC(), namespace=\"app\")", "implied. # See the License for the specific language governing", "Route, WebSocketRoute from starlette.staticfiles import StaticFiles from starlette.status import HTTP_403_FORBIDDEN", "under the Apache License, Version 2.0 (the \"License\"); # you", "receive, send) async def shutdown_server(_=None): aio_get_running_loop().call_later(0.5, force_exit) return PlainTextResponse(\"Server has", "\"PIO Home has been started. 
Press Ctrl+C to shutdown.\" ),", "None if no_open else click.launch(home_url), ], ), host=host, port=port, log_level=\"warning\",", "uvicorn from starlette.applications import Starlette from starlette.middleware import Middleware from", "by applicable law or agreed to in writing, software #", "import urlparse import click import uvicorn from starlette.applications import Starlette", "ws_rpc_factory.addObjectHandler(IDERPC(), namespace=\"ide\") ws_rpc_factory.addObjectHandler(MiscRPC(), namespace=\"misc\") ws_rpc_factory.addObjectHandler(OSRPC(), namespace=\"os\") ws_rpc_factory.addObjectHandler(PIOCoreRPC(), namespace=\"core\") ws_rpc_factory.addObjectHandler(ProjectRPC(), namespace=\"project\")", "from platformio.compat import aio_get_running_loop from platformio.exception import PlatformioException from platformio.package.manager.core", "force_exit class ShutdownMiddleware: def __init__(self, app): self.app = app async", "def __call__(self, scope, receive, send): if scope[\"type\"] == \"http\" and", "import PlatformioException from platformio.package.manager.core import get_core_package_dir from platformio.proc import force_exit", "b\"__shutdown__\" in scope.get(\"query_string\", {}): await shutdown_server() await self.app(scope, receive, send)", "= get_core_package_dir(\"contrib-piohome\") if not os.path.isdir(contrib_dir): raise PlatformioException(\"Invalid path to PIO", "\"Protected PlatformIO Home session\", status_code=HTTP_403_FORBIDDEN ) def run_server(host, port, no_open,", "middleware=[Middleware(ShutdownMiddleware)], routes=routes, on_startup=[ lambda: click.echo( \"PIO Home has been started.", "shutdown!\") async def protected_page(_): return PlainTextResponse( \"Protected PlatformIO Home session\",", "Copyright (c) 2014-present PlatformIO <<EMAIL>> # # Licensed under the", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "Unless required by applicable law or agreed to in writing,", "AppRPC from platformio.commands.home.rpc.handlers.ide 
import IDERPC from platformio.commands.home.rpc.handlers.misc import MiscRPC from", "uvicorn.run( Starlette( middleware=[Middleware(ShutdownMiddleware)], routes=routes, on_startup=[ lambda: click.echo( \"PIO Home has", "scope[\"type\"] == \"http\" and b\"__shutdown__\" in scope.get(\"query_string\", {}): await shutdown_server()", "the specific language governing permissions and # limitations under the", "# Copyright (c) 2014-present PlatformIO <<EMAIL>> # # Licensed under", "applicable law or agreed to in writing, software # distributed", "in writing, software # distributed under the License is distributed", "import IDERPC from platformio.commands.home.rpc.handlers.misc import MiscRPC from platformio.commands.home.rpc.handlers.os import OSRPC", "import os from urllib.parse import urlparse import click import uvicorn", "the License. import os from urllib.parse import urlparse import click", "async def __call__(self, scope, receive, send): if scope[\"type\"] == \"http\"", "class ShutdownMiddleware: def __init__(self, app): self.app = app async def", "from platformio.commands.home.rpc.handlers.project import ProjectRPC from platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory from platformio.compat", "= [ WebSocketRoute(path + \"wsrpc\", ws_rpc_factory, name=\"wsrpc\"), Route(path + \"__shutdown__\",", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "Home Contrib\") ws_rpc_factory = WebSocketJSONRPCServerFactory(shutdown_timeout) ws_rpc_factory.addObjectHandler(AccountRPC(), namespace=\"account\") ws_rpc_factory.addObjectHandler(AppRPC(), namespace=\"app\") ws_rpc_factory.addObjectHandler(IDERPC(),", "License, Version 2.0 (the \"License\"); # you may not use", "# You may obtain a copy of the License at", "click import uvicorn from starlette.applications import Starlette from starlette.middleware import", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "name=\"wsrpc\"), Route(path + \"__shutdown__\", 
shutdown_server, methods=[\"POST\"]), Mount(path, StaticFiles(directory=contrib_dir, html=True), name=\"static\"),", "run_server(host, port, no_open, shutdown_timeout, home_url): contrib_dir = get_core_package_dir(\"contrib-piohome\") if not", "app async def __call__(self, scope, receive, send): if scope[\"type\"] ==", "PlatformioException from platformio.package.manager.core import get_core_package_dir from platformio.proc import force_exit class", "the License for the specific language governing permissions and #", "click.echo( \"PIO Home has been started. Press Ctrl+C to shutdown.\"", "from platformio.commands.home.rpc.handlers.misc import MiscRPC from platformio.commands.home.rpc.handlers.os import OSRPC from platformio.commands.home.rpc.handlers.piocore", "Apache License, Version 2.0 (the \"License\"); # you may not", "return PlainTextResponse( \"Protected PlatformIO Home session\", status_code=HTTP_403_FORBIDDEN ) def run_server(host,", "Starlette from starlette.middleware import Middleware from starlette.responses import PlainTextResponse from", "either express or implied. 
# See the License for the", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "platformio.commands.home.rpc.handlers.ide import IDERPC from platformio.commands.home.rpc.handlers.misc import MiscRPC from platformio.commands.home.rpc.handlers.os import", "from platformio.commands.home.rpc.handlers.ide import IDERPC from platformio.commands.home.rpc.handlers.misc import MiscRPC from platformio.commands.home.rpc.handlers.os", "receive, send): if scope[\"type\"] == \"http\" and b\"__shutdown__\" in scope.get(\"query_string\",", "IDERPC from platformio.commands.home.rpc.handlers.misc import MiscRPC from platformio.commands.home.rpc.handlers.os import OSRPC from", "ProjectRPC from platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory from platformio.compat import aio_get_running_loop from", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "import Starlette from starlette.middleware import Middleware from starlette.responses import PlainTextResponse", "self.app(scope, receive, send) async def shutdown_server(_=None): aio_get_running_loop().call_later(0.5, force_exit) return PlainTextResponse(\"Server", "), lambda: None if no_open else click.launch(home_url), ], ), host=host,", "if scope[\"type\"] == \"http\" and b\"__shutdown__\" in scope.get(\"query_string\", {}): await", "send) async def shutdown_server(_=None): aio_get_running_loop().call_later(0.5, force_exit) return PlainTextResponse(\"Server has been", "session\", status_code=HTTP_403_FORBIDDEN ) def run_server(host, port, no_open, shutdown_timeout, home_url): contrib_dir", "os.path.isdir(contrib_dir): raise PlatformioException(\"Invalid path to PIO Home Contrib\") ws_rpc_factory =", "namespace=\"os\") ws_rpc_factory.addObjectHandler(PIOCoreRPC(), namespace=\"core\") ws_rpc_factory.addObjectHandler(ProjectRPC(), namespace=\"project\") path = urlparse(home_url).path routes =", "WebSocketRoute(path + \"wsrpc\", ws_rpc_factory, 
name=\"wsrpc\"), Route(path + \"__shutdown__\", shutdown_server, methods=[\"POST\"]),", "from platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory from platformio.compat import aio_get_running_loop from platformio.exception", "lambda: click.echo( \"PIO Home has been started. Press Ctrl+C to", "namespace=\"app\") ws_rpc_factory.addObjectHandler(IDERPC(), namespace=\"ide\") ws_rpc_factory.addObjectHandler(MiscRPC(), namespace=\"misc\") ws_rpc_factory.addObjectHandler(OSRPC(), namespace=\"os\") ws_rpc_factory.addObjectHandler(PIOCoreRPC(), namespace=\"core\") ws_rpc_factory.addObjectHandler(ProjectRPC(),", "from platformio.proc import force_exit class ShutdownMiddleware: def __init__(self, app): self.app", "\"License\"); # you may not use this file except in", "name=\"static\"), ] if path != \"/\": routes.append(Route(\"/\", protected_page)) uvicorn.run( Starlette(", "to shutdown.\" ), lambda: None if no_open else click.launch(home_url), ],", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "from starlette.responses import PlainTextResponse from starlette.routing import Mount, Route, WebSocketRoute", "shutdown_server, methods=[\"POST\"]), Mount(path, StaticFiles(directory=contrib_dir, html=True), name=\"static\"), ] if path !=", "been shutdown!\") async def protected_page(_): return PlainTextResponse( \"Protected PlatformIO Home", "routes = [ WebSocketRoute(path + \"wsrpc\", ws_rpc_factory, name=\"wsrpc\"), Route(path +", "# distributed under the License is distributed on an \"AS", "platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC from platformio.commands.home.rpc.handlers.project import ProjectRPC from platformio.commands.home.rpc.server import", "def protected_page(_): return PlainTextResponse( \"Protected PlatformIO Home session\", status_code=HTTP_403_FORBIDDEN )", "# Unless required by applicable law or agreed to in", "under the License. 
import os from urllib.parse import urlparse import", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "scope, receive, send): if scope[\"type\"] == \"http\" and b\"__shutdown__\" in", "import HTTP_403_FORBIDDEN from platformio.commands.home.rpc.handlers.account import AccountRPC from platformio.commands.home.rpc.handlers.app import AppRPC", "You may obtain a copy of the License at #", "from starlette.applications import Starlette from starlette.middleware import Middleware from starlette.responses", "contrib_dir = get_core_package_dir(\"contrib-piohome\") if not os.path.isdir(contrib_dir): raise PlatformioException(\"Invalid path to", "\"http\" and b\"__shutdown__\" in scope.get(\"query_string\", {}): await shutdown_server() await self.app(scope,", "the Apache License, Version 2.0 (the \"License\"); # you may", "PIOCoreRPC from platformio.commands.home.rpc.handlers.project import ProjectRPC from platformio.commands.home.rpc.server import WebSocketJSONRPCServerFactory from", "import get_core_package_dir from platformio.proc import force_exit class ShutdownMiddleware: def __init__(self,", "import AccountRPC from platformio.commands.home.rpc.handlers.app import AppRPC from platformio.commands.home.rpc.handlers.ide import IDERPC", "PlatformIO Home session\", status_code=HTTP_403_FORBIDDEN ) def run_server(host, port, no_open, shutdown_timeout," ]
[ "ALREADY_RECLAIMED = 3 # Request couldn't be processed in time.", "epoch in UTC. lease_expiration_ts = messages.IntegerField(8) class CatalogMachineAdditionRequest(messages.Message): \"\"\"Represents a", "a request to voluntarily cancel a LeaseRequest.\"\"\" # Per-user unique", "to a catalog manipulation request.\"\"\" # CatalogManipulationRequestError instance indicating an", "by the LeaseReleaseRequest. client_request_id = messages.StringField(3, required=True) class BatchedLeaseReleaseResponse(messages.Message): \"\"\"Represents", "regarding this request. pubsub_project = messages.StringField(5) # Instructions to give", "Request couldn't be processed in time. DEADLINE_EXCEEDED = 4 #", "to generate the LeaseRequest. client_request_id = messages.StringField(3, required=True) # State", "required=True) # Policies instance specifying machine-specific configuration. policies = messages.MessageField(Policies,", "from epoch in UTC. lease_expiration_ts = messages.IntegerField(8) class CatalogMachineAdditionRequest(messages.Message): \"\"\"Represents", "Pub/Sub topic. UNSPECIFIED_TOPIC = 8 # Attempted to delete a", "= 7 # Proposed duration was zero or negative. NONPOSITIVE_DEADLINE", "= 3 class LeaseResponse(messages.Message): \"\"\"Represents a response to a LeaseRequest.\"\"\"", "request whose machine was # already reclaimed. ALREADY_RECLAIMED = 2", "CatalogManipulationRequestError instance indicating an error with the # request, or", "# Request ID referred to non-existent request for this user.", "machine. 
instruction = messages.MessageField(Instruction, 1) # State of the instruction.", "CatalogMachineRetrievalRequest(messages.Message): \"\"\"Represents a request to retrieve a machine from the", "= messages.MessageField( CatalogMachineDeletionRequest, 3) class CatalogBatchManipulationResponse(messages.Message): \"\"\"Represents a response to", "LEASED = 9 class CatalogManipulationResponse(messages.Message): \"\"\"Represents a response to a", "expiration seconds from epoch in UTC. lease_expiration_ts = messages.IntegerField(6) class", "backend, hostnames must be unique in the catalog. HOSTNAME_REUSE =", "Didn't specify a Cloud Pub/Sub topic. UNSPECIFIED_TOPIC = 8 #", "to the catalog. dimensions.backend must be specified. dimensions.hostname must be", "to generate the LeaseRequest # referred to by the LeaseReleaseRequest.", "messages.MessageField( CatalogMachineAdditionRequest, 2) # CatalogMachineDeletionRequest this response is in reference", "manipulation request.\"\"\" responses = messages.MessageField( CatalogManipulationResponse, 1, repeated=True) class LeaseRequest(messages.Message):", "request_hash = messages.StringField(1) # LeaseReleaseRequestError indicating an error with the", "instruction. state = messages.StringField(2) class AckRequest(messages.Message): \"\"\"Represents a request to", "given to the machine. instruction = messages.MessageField(Instruction, 1) # State", "2) # CatalogMachineDeletionRequest this response is in reference to. machine_deletion_request", "messages.StringField(1, required=True) class BatchedLeaseReleaseRequest(messages.Message): \"\"\"Represents a batched set of lease", "this request. pubsub_topic = messages.StringField(4) # Cloud Pub/Sub project name", "components.machine_provider.instructions import * from components.machine_provider.policies import * class CatalogMachineRetrievalRequest(messages.Message): \"\"\"Represents", "fulfilled request whose machine was # already reclaimed. 
ALREADY_RECLAIMED =", "time is not in the future. LEASE_EXPIRATION_TS_ERROR = 9 #", "to a LeaseRequest.\"\"\" # SHA-1 identifying the LeaseRequest this response", "a request for instructions given to a machine.\"\"\" # Instruction", "= messages.MessageField(Instruction, 6) # UTC seconds from epoch when lease", "found in the LICENSE file. \"\"\"Messages for the Machine Provider", "to. request_hash = messages.StringField(1) # LeaseReleaseRequestError indicating an error with", "ID used to deduplicate requests. request_id = messages.StringField(1, required=True) #", "# MachineInstructionError indicating an error with the request, or None", "lease duration is too long. LEASE_TOO_LONG = 11 class LeaseRequestState(messages.Enum):", "4) # Hostname of the machine available for this request.", "batched catalog manipulation request.\"\"\" responses = messages.MessageField( CatalogManipulationResponse, 1, repeated=True)", "= messages.MessageField(Dimensions, 1) # Policies governing this machine. policies =", "retrieval request.\"\"\" # Dimensions instance specifying what sort of machine", "be unique. # Reusing a request ID in a different", "ID in a different request is an error. REQUEST_ID_REUSE =", "request.\"\"\" # CatalogManipulationRequestError instance indicating an error with the #", "machine. backend = messages.EnumField(Backend, 2) class CatalogMachineRetrievalResponse(messages.Message): \"\"\"Represents a response", "class CatalogBatchManipulationResponse(messages.Message): \"\"\"Represents a response to a batched catalog manipulation", "messages.StringField(1, required=True) # Backend the machine belongs to. backend =", "Apache License, Version 2.0 # that can be found in", "7 # Proposed duration was zero or negative. NONPOSITIVE_DEADLINE =", "was zero or negative. NONPOSITIVE_DEADLINE = 8 # Proposed expiration", "\"\"\"Represents a response to a catalog machine retrieval request.\"\"\" #", "# referred to by the LeaseReleaseRequest. 
client_request_id = messages.StringField(3, required=True)", "pubsub_subscription = messages.StringField(4) # Project the Cloud Pub/Sub subscription exists", "= messages.MessageField(LeaseResponse, 1, repeated=True) class LeaseReleaseRequest(messages.Message): \"\"\"Represents a request to", "client to generate the LeaseRequest # referred to by the", "None # if there is no error. error = messages.EnumField(LeaseRequestError,", "is an error. REQUEST_ID_REUSE = 1 # Proposed Cloud Pub/Sub", "request_id = messages.StringField(1, required=True) # Dimensions instance specifying what sort", "a batched catalog manipulation request.\"\"\" responses = messages.MessageField( CatalogManipulationResponse, 1,", "seconds from epoch in UTC. lease_expiration_ts = messages.IntegerField(6) class BatchedLeaseResponse(messages.Message):", "a response to a batched catalog manipulation request.\"\"\" responses =", "= messages.MessageField(Dimensions, 2, required=True) # Desired length of the lease", "= 3 class MachineInstructionResponse(messages.Message): \"\"\"Represents a response to a MachineInstructionRequest.\"\"\"", "= messages.IntegerField(8) class CatalogMachineAdditionRequest(messages.Message): \"\"\"Represents a request to add a", "this is. dimensions = messages.MessageField(Dimensions, 1) # Policies governing this", "= 1 # Request ID referred to an unfulfilled request.", "# Request ID used by the client to generate the", "of the machine available for this request. hostname = messages.StringField(5)", "required=True) # Instruction to send the leased machine. instruction =", "backend didn't match the backend originating the request. MISMATCHED_BACKEND =", "used by the client to generate the LeaseRequest # referred", "pubsub_topic = messages.StringField(6) # Project the Cloud Pub/Sub topic exists", "messages.StringField(1) # LeaseRequestError instance indicating an error with the request,", "a machine.\"\"\" # Instruction given to the machine. 
instruction =", "# Dimensions instance specifying what sort of machine this is.", "3 class MachineInstructionResponse(messages.Message): \"\"\"Represents a response to a MachineInstructionRequest.\"\"\" #", "# Attempted to delete a leased machine. LEASED = 9", "class MachineInstructionError(messages.Enum): \"\"\"Represents an error in a MachineInstructionRequest.\"\"\" # Request", "retrieve. hostname = messages.StringField(1, required=True) # Backend which added the", "= 2 # Invalid instruction for the machine. INVALID_INSTRUCTION =", "must listen to for instructions. pubsub_subscription = messages.StringField(4) # Project", "response refers to. request_hash = messages.StringField(1) # LeaseRequestError instance indicating", "a batched set of lease release requests.\"\"\" responses = messages.MessageField(LeaseReleaseResponse,", "a response to a request for instructions given to a", "used by the client to generate the LeaseRequest for the", "machine retrieval request.\"\"\" # Dimensions instance specifying what sort of", "request to add a machine to the catalog. dimensions.backend must", "BatchedLeaseReleaseResponse(messages.Message): \"\"\"Represents responses to a batched set of lease release", "CatalogMachineAdditionRequest, 1, repeated=True) class CatalogMachineDeletionRequest(messages.Message): \"\"\"Represents a request to delete", "= messages.EnumField(CatalogManipulationRequestError, 1) # CatalogMachineAdditionRequest this response is in reference", "of CatalogMachineAdditionRequests. dimensions.backend must be specified in each CatalogMachineAdditionRequest. dimensions.hostname", "to the machine. instruction = messages.MessageField(Instruction, 1) # State of", "# if there is no error. error = messages.EnumField(LeaseReleaseRequestError, 2)", "# already reclaimed. ALREADY_RECLAIMED = 3 # Request couldn't be", "messages.StringField(1, required=True) # Dimensions instance specifying what sort of machine", "error. 
error = messages.EnumField(MachineInstructionError, 2) class PollRequest(messages.Message): \"\"\"Represents a request", "in a LeaseRequest.\"\"\" # Request IDs are intended to be", "is. dimensions = messages.MessageField(Dimensions, 1, required=True) class CatalogManipulationRequestError(messages.Enum): \"\"\"Represents an", "request.\"\"\" responses = messages.MessageField( CatalogManipulationResponse, 1, repeated=True) class LeaseRequest(messages.Message): \"\"\"Represents", "specifying what sort of machine to lease. dimensions = messages.MessageField(Dimensions,", "match the backend originating the request. MISMATCHED_BACKEND = 4 #", "is. dimensions = messages.MessageField(Dimensions, 1, required=True) # Policies instance specifying", "a LeaseRequest.\"\"\" # SHA-1 identifying the LeaseRequest this response refers", "State of the LeaseRequest. state = messages.EnumField(LeaseRequestState, 4) # Hostname", "response is in reference to. machine_addition_request = messages.MessageField( CatalogMachineAdditionRequest, 2)", "# Hostname of the machine whose instruction to ack. hostname", "specify a backend. UNSPECIFIED_BACKEND = 3 # Specified backend didn't", "CatalogMachineDeletionRequest this response is in reference to. machine_deletion_request = messages.MessageField(", "request for this user. NOT_FOUND = 1 # Request ID", "to a batched catalog manipulation request.\"\"\" responses = messages.MessageField( CatalogManipulationResponse,", "Request ID used by the client to generate the LeaseRequest.", "request whose machine was # already reclaimed. ALREADY_RECLAIMED = 3", "Miscellaneous transient error. TRANSIENT_ERROR = 5 class LeaseReleaseResponse(messages.Message): \"\"\"Represents a", "a machine from the catalog.\"\"\" # Hostname of the machine", "machine_deletion_request = messages.MessageField( CatalogMachineDeletionRequest, 3) class CatalogBatchManipulationResponse(messages.Message): \"\"\"Represents a response", "specified. 
LEASE_LENGTH_UNSPECIFIED = 10 # Requested lease duration is too", "messages.EnumField(LeaseRequestState, 4) # Hostname of the machine available for this", "the Cloud Pub/Sub subscription exists in. pubsub_subscription_project = messages.StringField(5) #", "1, repeated=True) class CatalogMachineDeletionRequest(messages.Message): \"\"\"Represents a request to delete a", "identifying the LeaseRequest this response refers to. request_hash = messages.StringField(1)", "License, Version 2.0 # that can be found in the", "Request ID for the fulfilled LeaseRequest whose machine should be", "Mutually exclusive duration and lease_expiration_ts both specified. MUTUAL_EXCLUSION_ERROR = 7", "= 6 # Proposed Cloud Pub/Sub project was invalid. INVALID_PROJECT", "the request, or None # if there is no error.", "file. \"\"\"Messages for the Machine Provider API.\"\"\" # pylint: disable=unused-wildcard-import,", "this response is in reference to. machine_deletion_request = messages.MessageField( CatalogMachineDeletionRequest,", "communicate on regarding this request. pubsub_topic = messages.StringField(4) # Cloud", "= messages.StringField(6) # Project the Cloud Pub/Sub topic exists in.", "machine available for this request. hostname = messages.StringField(5) # Timestamp", "# Hostname of the machine whose instructions to retrieve. hostname", "client to generate the LeaseRequest for the # machine being", "1, repeated=True) class LeaseRequest(messages.Message): \"\"\"Represents a request for a lease", "MUTUAL_EXCLUSION_ERROR = 7 # Proposed duration was zero or negative.", "topic exists in. pubsub_topic_project = messages.StringField(7) # Timestamp indicating lease", "messages.StringField(2) class AckRequest(messages.Message): \"\"\"Represents a request to ack an instruction", "0 # LeaseRequest is pending provisioning of additional capacity. PENDING", "MISMATCHED_BACKEND = 4 # Didn't specify a hostname. 
UNSPECIFIED_HOSTNAME =", "pylint: disable=unused-wildcard-import, wildcard-import from protorpc import messages from components.machine_provider.dimensions import", "expiration time is not in the future. LEASE_EXPIRATION_TS_ERROR = 9", "processed in time. DEADLINE_EXCEEDED = 5 # Miscellaneous transient error.", "Neither duration nor lease_expiration_ts were specified. LEASE_LENGTH_UNSPECIFIED = 10 #", "invalid. INVALID_TOPIC = 6 # Proposed Cloud Pub/Sub project was", "3 # Didn't specify a Cloud Pub/Sub topic. UNSPECIFIED_TOPIC =", "deduplicate requests. request_id = messages.StringField(1, required=True) # Dimensions instance specifying", "instructed. client_request_id = messages.StringField(1, required=True) # MachineInstructionError indicating an error", "instance specifying machine-specific configuration. policies = messages.MessageField(Policies, 2, required=True) class", "invalid. INVALID_PROJECT = 7 # Didn't specify a Cloud Pub/Sub", "Cloud Pub/Sub topic was invalid. INVALID_TOPIC = 6 # Proposed", "hostname = messages.StringField(1, required=True) # Backend which added the machine.", "messages.IntegerField(7) class BatchedLeaseRequest(messages.Message): \"\"\"Represents a batched set of LeaseRequests.\"\"\" #", "be specified in each CatalogMachineAdditionRequest. dimensions.hostname must be unique per", "2, required=True) class CatalogMachineBatchAdditionRequest(messages.Message): \"\"\"Represents a batched set of CatalogMachineAdditionRequests.", "for the Machine Provider API.\"\"\" # pylint: disable=unused-wildcard-import, wildcard-import from", "the leased machine. instruction = messages.MessageField(Instruction, 2) class MachineInstructionError(messages.Enum): \"\"\"Represents", "2) class MachineInstructionError(messages.Enum): \"\"\"Represents an error in a MachineInstructionRequest.\"\"\" #", "of this source code is governed under the Apache License,", "machine. 
LEASED = 9 class CatalogManipulationResponse(messages.Message): \"\"\"Represents a response to", "there is no error. error = messages.EnumField(CatalogManipulationRequestError, 1) # CatalogMachineAdditionRequest", "unique ID used to identify the LeaseRequest. request_id = messages.StringField(1,", "\"\"\"Represents a response to a batched lease request.\"\"\" responses =", "for this request. hostname = messages.StringField(5) # Timestamp indicating lease", "2 # LeaseRequest has been denied. DENIED = 3 class", "dimensions.backend must be specified in each CatalogMachineAdditionRequest. dimensions.hostname must be", "messages.MessageField(Policies, 2, required=True) class CatalogMachineBatchAdditionRequest(messages.Message): \"\"\"Represents a batched set of", "messages.MessageField( CatalogMachineDeletionRequest, 3) class CatalogBatchManipulationResponse(messages.Message): \"\"\"Represents a response to a", "from protorpc import messages from components.machine_provider.dimensions import * from components.machine_provider.instructions", "lease in seconds. duration = messages.IntegerField(3) # Cloud Pub/Sub topic", "machine whose instructions to retrieve. hostname = messages.StringField(1, required=True) #", "on regarding this request. pubsub_project = messages.StringField(5) # Instructions to", "both specified. MUTUAL_EXCLUSION_ERROR = 7 # Proposed duration was zero", "machine must listen to for instructions. pubsub_subscription = messages.StringField(4) #", "be processed in time. DEADLINE_EXCEEDED = 4 # Miscellaneous transient", "a response to a LeaseRequest.\"\"\" # SHA-1 identifying the LeaseRequest", "messages.StringField(5) # Timestamp indicating lease expiration seconds from epoch in", "communicate on regarding this request. pubsub_project = messages.StringField(5) # Instructions", "the CatalogMachineEntry. 
state = messages.StringField(3) # Cloud Pub/Sub subscription the", "CatalogBatchManipulationResponse(messages.Message): \"\"\"Represents a response to a batched catalog manipulation request.\"\"\"", "be # instructed. request_id = messages.StringField(1, required=True) # Instruction to", "policies = messages.MessageField(Policies, 2, required=True) class CatalogMachineBatchAdditionRequest(messages.Message): \"\"\"Represents a batched", "messages.MessageField(Instruction, 1) # State of the instruction. state = messages.StringField(2)", "of the machine to retrieve. hostname = messages.StringField(1, required=True) #", "FULFILLED = 2 # LeaseRequest has been denied. DENIED =", "pubsub_project = messages.StringField(5) # Instructions to give the machine once", "what sort of machine this is. dimensions = messages.MessageField(Dimensions, 1,", "# Desired length of the lease in seconds. duration =", "is too long. LEASE_TOO_LONG = 11 class LeaseRequestState(messages.Enum): \"\"\"Represents the", "the client to generate the LeaseRequest. client_request_id = messages.StringField(3, required=True)", "import * from components.machine_provider.instructions import * from components.machine_provider.policies import *", "10 # Requested lease duration is too long. LEASE_TOO_LONG =", "# Policies governing this machine. policies = messages.MessageField(Policies, 2) #", "Use of this source code is governed under the Apache", "= messages.MessageField(LeaseRequest, 1, repeated=True) class LeaseRequestError(messages.Enum): \"\"\"Represents an error in", "to send the leased machine. instruction = messages.MessageField(Instruction, 2) class", "LeaseRequestError instance indicating an error with the request, or None", "in the catalog. 
HOSTNAME_REUSE = 1 # Tried to lookup", "= messages.EnumField(LeaseReleaseRequestError, 2) # Request ID used by the client", "class LeaseResponse(messages.Message): \"\"\"Represents a response to a LeaseRequest.\"\"\" # SHA-1", "fulfilled LeaseRequest whose machine should be # instructed. request_id =", "None # if there is no error. error = messages.EnumField(LeaseReleaseRequestError,", "no error. error = messages.EnumField(LeaseReleaseRequestError, 2) # Request ID used", "this response is in reference to. machine_addition_request = messages.MessageField( CatalogMachineAdditionRequest,", "AckRequest(messages.Message): \"\"\"Represents a request to ack an instruction received by", "required=True) # State of the LeaseRequest. state = messages.EnumField(LeaseRequestState, 4)", "\"\"\"Represents a request to ack an instruction received by a", "to a request for instructions given to a machine.\"\"\" #", "lease on a machine.\"\"\" # Per-user unique ID used to", "dimensions = messages.MessageField(Dimensions, 1, required=True) class CatalogManipulationRequestError(messages.Enum): \"\"\"Represents an error", "project name to communicate on regarding this request. pubsub_project =", "ID referred to non-existent request for this user. NOT_FOUND =", "\"\"\"Messages for the Machine Provider API.\"\"\" # pylint: disable=unused-wildcard-import, wildcard-import", "LEASE_EXPIRATION_TS_ERROR = 9 # Neither duration nor lease_expiration_ts were specified.", "a request for a lease on a machine.\"\"\" # Per-user", "exists in. pubsub_topic_project = messages.StringField(7) # Timestamp indicating lease expiration", "= messages.MessageField(LeaseReleaseResponse, 1, repeated=True) class MachineInstructionRequest(messages.Message): \"\"\"Represents a request to", "in a LeaseReleaseRequest.\"\"\" # Request ID referred to non-existent request", "ID used by the client to generate the LeaseRequest #", "Cloud Pub/Sub topic. 
UNSPECIFIED_TOPIC = 4 # Request couldn't be", "class CatalogMachineDeletionRequest(messages.Message): \"\"\"Represents a request to delete a machine in", "generate the LeaseRequest for the # machine being instructed. client_request_id", "\"\"\"Represents an error in a MachineInstructionRequest.\"\"\" # Request ID referred", "sort of machine this is. dimensions = messages.MessageField(Dimensions, 1) #", "Reusing a request ID in a different request is an", "1, repeated=True) class LeaseReleaseRequest(messages.Message): \"\"\"Represents a request to voluntarily cancel", "* from components.machine_provider.instructions import * from components.machine_provider.policies import * class", "invalid. INVALID_PROJECT = 3 # Didn't specify a Cloud Pub/Sub", "# Per backend, hostnames must be unique in the catalog.", "\"\"\"Represents a response to a batched catalog manipulation request.\"\"\" responses", "given to a machine.\"\"\" # Hostname of the machine whose", "topic. UNSPECIFIED_TOPIC = 4 # Request couldn't be processed in", "Proposed duration was zero or negative. NONPOSITIVE_DEADLINE = 8 #", "together. requests = messages.MessageField( CatalogMachineAdditionRequest, 1, repeated=True) class CatalogMachineDeletionRequest(messages.Message): \"\"\"Represents", "on regarding this request. pubsub_topic = messages.StringField(4) # Cloud Pub/Sub", "machine from the catalog.\"\"\" # Hostname of the machine to", "the LICENSE file. \"\"\"Messages for the Machine Provider API.\"\"\" #", "\"\"\"Represents an error in a catalog manipulation request.\"\"\" # Per", "for the # machine being instructed. client_request_id = messages.StringField(1, required=True)", "is no error. error = messages.EnumField(CatalogManipulationRequestError, 1) # CatalogMachineAdditionRequest this", "messages.MessageField( CatalogMachineAdditionRequest, 1, repeated=True) class CatalogMachineDeletionRequest(messages.Message): \"\"\"Represents a request to", "Copyright 2015 The LUCI Authors. All rights reserved. 
# Use", "referred to an unfulfilled request. NOT_FULFILLED = 2 # Request", "to lease. dimensions = messages.MessageField(Dimensions, 2, required=True) # Desired length", "in time. DEADLINE_EXCEEDED = 5 # Miscellaneous transient error. TRANSIENT_ERROR", "Hostname of the machine available for this request. hostname =", "a machine.\"\"\" # Per-user unique ID used to deduplicate requests.", "# Request ID referred to an unfulfilled request. NOT_FULFILLED =", "to give the machine once it's been leased. on_lease =", "the LeaseReleaseRequest. client_request_id = messages.StringField(3, required=True) class BatchedLeaseReleaseResponse(messages.Message): \"\"\"Represents responses", "instruction to ack. hostname = messages.StringField(1, required=True) # Backend the", "ID referred to an unfulfilled request. NOT_FULFILLED = 2 #", "Cloud Pub/Sub topic exists in. pubsub_topic_project = messages.StringField(7) # Timestamp", "future. LEASE_EXPIRATION_TS_ERROR = 9 # Neither duration nor lease_expiration_ts were", "time. DEADLINE_EXCEEDED = 5 # Miscellaneous transient error. TRANSIENT_ERROR =", "error with the request, or None # if there is", "TRANSIENT_ERROR = 5 class LeaseReleaseResponse(messages.Message): \"\"\"Represents a response to a", "the LeaseRequest for the # machine being instructed. client_request_id =", "was invalid. INVALID_TOPIC = 6 # Proposed Cloud Pub/Sub project", "of a LeaseRequest.\"\"\" # LeaseRequest has been received, but not", "state = messages.EnumField(LeaseRequestState, 4) # Hostname of the machine available", "with the # request, or None if there is no", "instance indicating an error with the request, or None #", "this request. pubsub_project = messages.StringField(5) # Instructions to give the", "subscription the machine must listen to for instructions. pubsub_subscription =", "# SHA-1 identifying the LeaseRequest this response refers to. 
request_hash", "\"\"\"Represents responses to a batched set of lease release requests.\"\"\"", "LeaseRequest has been received, but not processed yet. UNTRIAGED =", "# Backend which added the machine. backend = messages.EnumField(Backend, 2)", "be found in the LICENSE file. \"\"\"Messages for the Machine", "this user. NOT_FOUND = 1 # Request ID referred to", "of machine this is. dimensions = messages.MessageField(Dimensions, 1, required=True) class", "# Request ID referred to a fulfilled request whose machine", "in. pubsub_subscription_project = messages.StringField(5) # Cloud Pub/Sub topic the machine", "a response to a MachineInstructionRequest.\"\"\" # Request ID used by", "repeated=True) class LeaseRequest(messages.Message): \"\"\"Represents a request for a lease on", "topic name to communicate on regarding this request. pubsub_topic =", "messages.StringField(3, required=True) # State of the LeaseRequest. state = messages.EnumField(LeaseRequestState,", "MachineInstructionRequest.\"\"\" # Request ID used by the client to generate", "request_id = messages.StringField(1, required=True) class BatchedLeaseReleaseRequest(messages.Message): \"\"\"Represents a batched set", "5 # Proposed Cloud Pub/Sub topic was invalid. INVALID_TOPIC =", "messages.EnumField(LeaseRequestError, 2) # Request ID used by the client to", "backend = messages.EnumField(Backend, 2) class PollResponse(messages.Message): \"\"\"Represents a response to", "what sort of machine to lease. dimensions = messages.MessageField(Dimensions, 2,", "lease should expire. lease_expiration_ts = messages.IntegerField(7) class BatchedLeaseRequest(messages.Message): \"\"\"Represents a", "BatchedLeaseResponse(messages.Message): \"\"\"Represents a response to a batched lease request.\"\"\" responses", "referred to an unfulfilled request. NOT_FULFILLED = 1 # Request", "a machine to the catalog. dimensions.backend must be specified. 
dimensions.hostname", "request to retrieve a machine from the catalog.\"\"\" # Hostname", "give the machine once it's been leased. on_lease = messages.MessageField(Instruction,", "CatalogMachineAdditionRequest, 2) # CatalogMachineDeletionRequest this response is in reference to.", "LeaseReleaseRequest. client_request_id = messages.StringField(3, required=True) class BatchedLeaseReleaseResponse(messages.Message): \"\"\"Represents responses to", "TRANSIENT_ERROR = 6 # Mutually exclusive duration and lease_expiration_ts both", "= messages.StringField(4) # Project the Cloud Pub/Sub subscription exists in.", "to retrieve. hostname = messages.StringField(1, required=True) # Backend the machine", "epoch when lease should expire. lease_expiration_ts = messages.IntegerField(7) class BatchedLeaseRequest(messages.Message):", "by the client to generate the LeaseRequest # referred to", "request to ack an instruction received by a machine.\"\"\" #", "of machine to lease. dimensions = messages.MessageField(Dimensions, 2, required=True) #", "instructions to retrieve. hostname = messages.StringField(1, required=True) # Backend the", "messages.StringField(1, required=True) # Backend which added the machine. backend =", "governing this machine. policies = messages.MessageField(Policies, 2) # State of", "unfulfilled request. NOT_FULFILLED = 1 # Request ID referred to", "voluntarily cancel a LeaseRequest.\"\"\" # Per-user unique ID used to", "pubsub_topic = messages.StringField(4) # Cloud Pub/Sub project name to communicate", "to. Generally required. backend = messages.EnumField(Backend, 2) class PollResponse(messages.Message): \"\"\"Represents", "a catalog manipulation request.\"\"\" # CatalogManipulationRequestError instance indicating an error", "LeaseReleaseRequest(messages.Message): \"\"\"Represents a request to voluntarily cancel a LeaseRequest.\"\"\" #", "messages.EnumField(LeaseReleaseRequestError, 2) # Request ID used by the client to", "error. 
error = messages.EnumField(LeaseReleaseRequestError, 2) # Request ID used by", "Cloud Pub/Sub subscription the machine must listen to for instructions.", "# Request couldn't be processed in time. DEADLINE_EXCEEDED = 4", "NOT_FULFILLED = 1 # Request ID referred to a fulfilled", "transient error. TRANSIENT_ERROR = 6 # Mutually exclusive duration and", "for this user. NOT_FOUND = 1 # Request ID referred", "the Cloud Pub/Sub topic exists in. pubsub_topic_project = messages.StringField(7) #", "= messages.MessageField(Instruction, 1) # State of the instruction. state =", "error = messages.EnumField(CatalogManipulationRequestError, 1) # CatalogMachineAdditionRequest this response is in", "machine.\"\"\" # Hostname of the machine whose instructions to retrieve.", "CatalogMachineDeletionRequest(messages.Message): \"\"\"Represents a request to delete a machine in the", "set of lease release requests.\"\"\" requests = messages.MessageField(LeaseReleaseRequest, 1, repeated=True)", "backend. UNSPECIFIED_BACKEND = 3 # Specified backend didn't match the", "instructions given to a machine.\"\"\" # Hostname of the machine", "the catalog.\"\"\" # Dimensions instance specifying what sort of machine", "batched lease request.\"\"\" responses = messages.MessageField(LeaseResponse, 1, repeated=True) class LeaseReleaseRequest(messages.Message):", "an error in a MachineInstructionRequest.\"\"\" # Request ID referred to", "in the LICENSE file. \"\"\"Messages for the Machine Provider API.\"\"\"", "2 # Invalid instruction for the machine. 
INVALID_INSTRUCTION = 3", "lease release requests.\"\"\" requests = messages.MessageField(LeaseReleaseRequest, 1, repeated=True) class LeaseReleaseRequestError(messages.Enum):", "= messages.StringField(5) # Instructions to give the machine once it's", "a LeaseReleaseRequest.\"\"\" # Request ID referred to non-existent request for", "request.\"\"\" # Per backend, hostnames must be unique in the", "LeaseReleaseResponse(messages.Message): \"\"\"Represents a response to a LeaseReleaseRequest.\"\"\" # SHA-1 identifying", "poll for instructions given to a machine.\"\"\" # Hostname of", "not processed yet. UNTRIAGED = 0 # LeaseRequest is pending", "messages.MessageField(Instruction, 2) class MachineInstructionError(messages.Enum): \"\"\"Represents an error in a MachineInstructionRequest.\"\"\"", "unfulfilled request. NOT_FULFILLED = 2 # Request ID referred to", "couldn't be processed in time. DEADLINE_EXCEEDED = 4 # Miscellaneous", "duration nor lease_expiration_ts were specified. LEASE_LENGTH_UNSPECIFIED = 10 # Requested", "machine being instructed. client_request_id = messages.StringField(1, required=True) # MachineInstructionError indicating", "= 5 # Proposed Cloud Pub/Sub topic was invalid. INVALID_TOPIC", "# Timestamp indicating lease expiration seconds from epoch in UTC.", "per backend. \"\"\" # CatalogMachineAdditionRequest instances to batch together. requests", "the machine must be subscribed to. pubsub_topic = messages.StringField(6) #", "Miscellaneous transient error. TRANSIENT_ERROR = 6 # Mutually exclusive duration", "to an unfulfilled request. NOT_FULFILLED = 2 # Request ID", "a LeaseReleaseRequest.\"\"\" # SHA-1 identifying the LeaseRequest this response refers", "hostname. UNSPECIFIED_HOSTNAME = 5 # Proposed Cloud Pub/Sub topic was", "rights reserved. # Use of this source code is governed", "required=True) # Dimensions instance specifying what sort of machine to", "# Didn't specify a hostname. UNSPECIFIED_HOSTNAME = 5 # Proposed", "UTC. 
lease_expiration_ts = messages.IntegerField(6) class BatchedLeaseResponse(messages.Message): \"\"\"Represents a response to", "= messages.EnumField(MachineInstructionError, 2) class PollRequest(messages.Message): \"\"\"Represents a request to poll", "an entry that didn't exist. ENTRY_NOT_FOUND = 2 # Didn't", "LeaseRequestError(messages.Enum): \"\"\"Represents an error in a LeaseRequest.\"\"\" # Request IDs", "Backend which added the machine. backend = messages.EnumField(Backend, 2) class", "= 1 # LeaseRequest has been fulfilled. FULFILLED = 2", "client_request_id = messages.StringField(3, required=True) class BatchedLeaseReleaseResponse(messages.Message): \"\"\"Represents responses to a", "to generate the LeaseRequest for the # machine being instructed.", "# already reclaimed. ALREADY_RECLAIMED = 2 # Invalid instruction for", "topic was invalid. INVALID_TOPIC = 6 # Proposed Cloud Pub/Sub", "Hostname of the machine whose instruction to ack. hostname =", "an instruction received by a machine.\"\"\" # Hostname of the", "pubsub_subscription_project = messages.StringField(5) # Cloud Pub/Sub topic the machine must", "= 2 # Request ID referred to a fulfilled request", "Request couldn't be processed in time. DEADLINE_EXCEEDED = 5 #", "is in reference to. machine_addition_request = messages.MessageField( CatalogMachineAdditionRequest, 2) #", "request to poll for instructions given to a machine.\"\"\" #", "already reclaimed. ALREADY_RECLAIMED = 2 # Invalid instruction for the", "\"\"\"Represents a request to poll for instructions given to a", "messages.MessageField(Dimensions, 1, required=True) # Policies instance specifying machine-specific configuration. policies", "= 10 # Requested lease duration is too long. LEASE_TOO_LONG", "3 # Specified backend didn't match the backend originating the", "# Instruction to send the leased machine. instruction = messages.MessageField(Instruction,", "Per-user unique ID used to deduplicate requests. 
request_id = messages.StringField(1,", "messages.StringField(5) # Cloud Pub/Sub topic the machine must be subscribed", "generate the LeaseRequest. client_request_id = messages.StringField(3, required=True) # State of", "= messages.StringField(5) # Cloud Pub/Sub topic the machine must be", "machine should be # instructed. request_id = messages.StringField(1, required=True) #", "INVALID_INSTRUCTION = 3 class MachineInstructionResponse(messages.Message): \"\"\"Represents a response to a", "request, or None if there is no error. error =", "UNTRIAGED = 0 # LeaseRequest is pending provisioning of additional", "requests = messages.MessageField( CatalogMachineAdditionRequest, 1, repeated=True) class CatalogMachineDeletionRequest(messages.Message): \"\"\"Represents a", "CatalogMachineRetrievalResponse(messages.Message): \"\"\"Represents a response to a catalog machine retrieval request.\"\"\"", "a request to retrieve a machine from the catalog.\"\"\" #", "= 7 # Didn't specify a Cloud Pub/Sub topic. UNSPECIFIED_TOPIC", "and lease_expiration_ts both specified. MUTUAL_EXCLUSION_ERROR = 7 # Proposed duration", "request to send an instruction to a leased machine.\"\"\" #", "the fulfilled LeaseRequest whose machine should be # instructed. request_id", "is governed under the Apache License, Version 2.0 # that", "available for this request. hostname = messages.StringField(5) # Timestamp indicating", "used to identify the LeaseRequest. request_id = messages.StringField(1, required=True) class", "instructed. request_id = messages.StringField(1, required=True) # Instruction to send the", "to. request_hash = messages.StringField(1) # LeaseRequestError instance indicating an error", "instruction received by a machine.\"\"\" # Hostname of the machine", "# Per-user unique ID used to deduplicate requests. 
request_id =", "to voluntarily cancel a LeaseRequest.\"\"\" # Per-user unique ID used", "= 6 # Mutually exclusive duration and lease_expiration_ts both specified.", "messages.IntegerField(6) class BatchedLeaseResponse(messages.Message): \"\"\"Represents a response to a batched lease", "machine belongs to. Generally required. backend = messages.EnumField(Backend, 2) class", "a different request is an error. REQUEST_ID_REUSE = 1 #", "= messages.IntegerField(3) # Cloud Pub/Sub topic name to communicate on", "in each CatalogMachineAdditionRequest. dimensions.hostname must be unique per backend. \"\"\"", "MachineInstructionResponse(messages.Message): \"\"\"Represents a response to a MachineInstructionRequest.\"\"\" # Request ID", "machine in the catalog.\"\"\" # Dimensions instance specifying what sort", "lease. dimensions = messages.MessageField(Dimensions, 2, required=True) # Desired length of", "class LeaseReleaseRequestError(messages.Enum): \"\"\"Represents an error in a LeaseReleaseRequest.\"\"\" # Request", "None # if there is no error. error = messages.EnumField(MachineInstructionError,", "is no error. error = messages.EnumField(LeaseReleaseRequestError, 2) # Request ID", "in UTC. lease_expiration_ts = messages.IntegerField(6) class BatchedLeaseResponse(messages.Message): \"\"\"Represents a response", "batched set of lease release requests.\"\"\" responses = messages.MessageField(LeaseReleaseResponse, 1,", "seconds from epoch when lease should expire. lease_expiration_ts = messages.IntegerField(7)", "listen to for instructions. pubsub_subscription = messages.StringField(4) # Project the", "not in the future. LEASE_EXPIRATION_TS_ERROR = 9 # Neither duration", "CatalogMachineAdditionRequest(messages.Message): \"\"\"Represents a request to add a machine to the", "UNSPECIFIED_HOSTNAME = 5 # Proposed Cloud Pub/Sub topic was invalid.", "machine was # already reclaimed. 
ALREADY_RECLAIMED = 3 # Request", "PollResponse(messages.Message): \"\"\"Represents a response to a request for instructions given", "# Backend the machine belongs to. Generally required. backend =", "pending provisioning of additional capacity. PENDING = 1 # LeaseRequest", "request. NOT_FULFILLED = 2 # Request ID referred to a", "# instructed. request_id = messages.StringField(1, required=True) # Instruction to send", "leased. on_lease = messages.MessageField(Instruction, 6) # UTC seconds from epoch", "2015 The LUCI Authors. All rights reserved. # Use of", "must be unique per backend. \"\"\" # Dimensions instance specifying", "6) # UTC seconds from epoch when lease should expire.", "# Cloud Pub/Sub subscription the machine must listen to for", "from components.machine_provider.instructions import * from components.machine_provider.policies import * class CatalogMachineRetrievalRequest(messages.Message):", "= messages.StringField(3, required=True) class BatchedLeaseReleaseResponse(messages.Message): \"\"\"Represents responses to a batched", "# Instruction given to the machine. instruction = messages.MessageField(Instruction, 1)", "add a machine to the catalog. dimensions.backend must be specified.", "messages.IntegerField(8) class CatalogMachineAdditionRequest(messages.Message): \"\"\"Represents a request to add a machine", "class LeaseRequestState(messages.Enum): \"\"\"Represents the state of a LeaseRequest.\"\"\" # LeaseRequest", "lease expiration seconds from epoch in UTC. lease_expiration_ts = messages.IntegerField(8)", "unique in the catalog. HOSTNAME_REUSE = 1 # Tried to", "request. pubsub_topic = messages.StringField(4) # Cloud Pub/Sub project name to", "\"\"\"Represents a response to a LeaseRequest.\"\"\" # SHA-1 identifying the", "MachineInstructionRequest(messages.Message): \"\"\"Represents a request to send an instruction to a", "an unfulfilled request. NOT_FULFILLED = 1 # Request ID referred", "is not in the future. 
LEASE_EXPIRATION_TS_ERROR = 9 # Neither", "= 5 class LeaseReleaseResponse(messages.Message): \"\"\"Represents a response to a LeaseReleaseRequest.\"\"\"", "of the CatalogMachineEntry. state = messages.StringField(3) # Cloud Pub/Sub subscription", "CatalogMachineAdditionRequests. dimensions.backend must be specified in each CatalogMachineAdditionRequest. dimensions.hostname must", "LeaseRequests.\"\"\" # LeaseRequest instances to batch together. requests = messages.MessageField(LeaseRequest,", "Pub/Sub subscription the machine must listen to for instructions. pubsub_subscription", "seconds from epoch in UTC. lease_expiration_ts = messages.IntegerField(8) class CatalogMachineAdditionRequest(messages.Message):", "duration and lease_expiration_ts both specified. MUTUAL_EXCLUSION_ERROR = 7 # Proposed", "9 # Neither duration nor lease_expiration_ts were specified. LEASE_LENGTH_UNSPECIFIED =", "messages.StringField(6) # Project the Cloud Pub/Sub topic exists in. pubsub_topic_project", "3 # Request couldn't be processed in time. DEADLINE_EXCEEDED =", "* from components.machine_provider.policies import * class CatalogMachineRetrievalRequest(messages.Message): \"\"\"Represents a request", "request. MISMATCHED_BACKEND = 4 # Didn't specify a hostname. UNSPECIFIED_HOSTNAME", "CatalogMachineEntry. state = messages.StringField(3) # Cloud Pub/Sub subscription the machine", "# LeaseRequestError instance indicating an error with the request, or", "a LeaseRequest.\"\"\" # Request IDs are intended to be unique.", "the backend originating the request. MISMATCHED_BACKEND = 4 # Didn't", "Policies instance specifying machine-specific configuration. policies = messages.MessageField(Policies, 2, required=True)", "repeated=True) class MachineInstructionRequest(messages.Message): \"\"\"Represents a request to send an instruction", "instance indicating an error with the # request, or None", "be unique per backend. 
\"\"\" # CatalogMachineAdditionRequest instances to batch", "repeated=True) class LeaseReleaseRequestError(messages.Enum): \"\"\"Represents an error in a LeaseReleaseRequest.\"\"\" #", "from the catalog.\"\"\" # Hostname of the machine to retrieve.", "# Reusing a request ID in a different request is", "to a MachineInstructionRequest.\"\"\" # Request ID used by the client", "# if there is no error. error = messages.EnumField(MachineInstructionError, 2)", "be subscribed to. pubsub_topic = messages.StringField(6) # Project the Cloud", "instructions given to a machine.\"\"\" # Instruction given to the", "was # already reclaimed. ALREADY_RECLAIMED = 3 # Request couldn't", "= messages.MessageField(Instruction, 2) class MachineInstructionError(messages.Enum): \"\"\"Represents an error in a", "8 # Proposed expiration time is not in the future.", "\"\"\"Represents an error in a LeaseRequest.\"\"\" # Request IDs are", "machine this is. dimensions = messages.MessageField(Dimensions, 1, required=True) # Policies", "dimensions = messages.MessageField(Dimensions, 1, required=True) # Policies instance specifying machine-specific", "if there is no error. error = messages.EnumField(LeaseRequestError, 2) #", "of the machine whose instruction to ack. hostname = messages.StringField(1,", "didn't match the backend originating the request. MISMATCHED_BACKEND = 4", "the catalog.\"\"\" # Hostname of the machine to retrieve. hostname", "Cloud Pub/Sub project name to communicate on regarding this request.", "once it's been leased. on_lease = messages.MessageField(Instruction, 6) # UTC", "Provider API.\"\"\" # pylint: disable=unused-wildcard-import, wildcard-import from protorpc import messages", "if there is no error. error = messages.EnumField(MachineInstructionError, 2) class", "backend originating the request. MISMATCHED_BACKEND = 4 # Didn't specify", "CatalogMachineAdditionRequest. dimensions.hostname must be unique per backend. 
\"\"\" # CatalogMachineAdditionRequest", "= messages.EnumField(LeaseRequestError, 2) # Request ID used by the client", "to ack an instruction received by a machine.\"\"\" # Hostname", "1 # LeaseRequest has been fulfilled. FULFILLED = 2 #", "no error. error = messages.EnumField(LeaseRequestError, 2) # Request ID used", "1) # CatalogMachineAdditionRequest this response is in reference to. machine_addition_request", "error in a MachineInstructionRequest.\"\"\" # Request ID referred to an", "# Proposed Cloud Pub/Sub project was invalid. INVALID_PROJECT = 3", "set of LeaseRequests.\"\"\" # LeaseRequest instances to batch together. requests", "the machine once it's been leased. on_lease = messages.MessageField(Instruction, 6)", "IDs are intended to be unique. # Reusing a request", "this response refers to. request_hash = messages.StringField(1) # LeaseRequestError instance", "responses to a batched set of lease release requests.\"\"\" responses", "required=True) # Backend which added the machine. backend = messages.EnumField(Backend,", "yet. UNTRIAGED = 0 # LeaseRequest is pending provisioning of", "request is an error. REQUEST_ID_REUSE = 1 # Proposed Cloud", "1 # Request ID referred to a fulfilled request whose", "messages.StringField(3) # Cloud Pub/Sub subscription the machine must listen to", "= 9 # Neither duration nor lease_expiration_ts were specified. LEASE_LENGTH_UNSPECIFIED", "the future. LEASE_EXPIRATION_TS_ERROR = 9 # Neither duration nor lease_expiration_ts", "machine. policies = messages.MessageField(Policies, 2) # State of the CatalogMachineEntry.", "LeaseRequest this response refers to. request_hash = messages.StringField(1) # LeaseRequestError", "the LeaseRequest. request_id = messages.StringField(1, required=True) class BatchedLeaseReleaseRequest(messages.Message): \"\"\"Represents a", "LeaseReleaseRequestError(messages.Enum): \"\"\"Represents an error in a LeaseReleaseRequest.\"\"\" # Request ID", "to ack. 
hostname = messages.StringField(1, required=True) # Backend the machine", "pubsub_topic_project = messages.StringField(7) # Timestamp indicating lease expiration seconds from", "= 4 # Didn't specify a hostname. UNSPECIFIED_HOSTNAME = 5", "a leased machine.\"\"\" # Request ID for the fulfilled LeaseRequest", "DENIED = 3 class LeaseResponse(messages.Message): \"\"\"Represents a response to a", "identify the LeaseRequest. request_id = messages.StringField(1, required=True) class BatchedLeaseReleaseRequest(messages.Message): \"\"\"Represents", "2, required=True) # Desired length of the lease in seconds.", "messages.MessageField(Dimensions, 1) # Policies governing this machine. policies = messages.MessageField(Policies,", "State of the CatalogMachineEntry. state = messages.StringField(3) # Cloud Pub/Sub", "topic the machine must be subscribed to. pubsub_topic = messages.StringField(6)", "Proposed Cloud Pub/Sub project was invalid. INVALID_PROJECT = 3 #", "error = messages.EnumField(LeaseReleaseRequestError, 2) # Request ID used by the", "machine to retrieve. hostname = messages.StringField(1, required=True) # Backend which", "to an unfulfilled request. NOT_FULFILLED = 1 # Request ID", "Policies governing this machine. policies = messages.MessageField(Policies, 2) # State", "\"\"\" # CatalogMachineAdditionRequest instances to batch together. requests = messages.MessageField(", "a batched set of CatalogMachineAdditionRequests. dimensions.backend must be specified in", "zero or negative. NONPOSITIVE_DEADLINE = 8 # Proposed expiration time", "responses = messages.MessageField(LeaseReleaseResponse, 1, repeated=True) class MachineInstructionRequest(messages.Message): \"\"\"Represents a request", "required=True) class CatalogMachineBatchAdditionRequest(messages.Message): \"\"\"Represents a batched set of CatalogMachineAdditionRequests. dimensions.backend", "error. 
TRANSIENT_ERROR = 5 class LeaseReleaseResponse(messages.Message): \"\"\"Represents a response to", "to delete a machine in the catalog.\"\"\" # Dimensions instance", "specify a Cloud Pub/Sub topic. UNSPECIFIED_TOPIC = 8 # Attempted", "REQUEST_ID_REUSE = 1 # Proposed Cloud Pub/Sub topic was invalid.", "referred to by the LeaseReleaseRequest. client_request_id = messages.StringField(3, required=True) class", "class MachineInstructionRequest(messages.Message): \"\"\"Represents a request to send an instruction to", "CatalogManipulationResponse, 1, repeated=True) class LeaseRequest(messages.Message): \"\"\"Represents a request for a", "# that can be found in the LICENSE file. \"\"\"Messages", "request. pubsub_project = messages.StringField(5) # Instructions to give the machine", "= 3 # Didn't specify a Cloud Pub/Sub topic. UNSPECIFIED_TOPIC", "\"\"\"Represents a request to voluntarily cancel a LeaseRequest.\"\"\" # Per-user", "the machine whose instructions to retrieve. hostname = messages.StringField(1, required=True)", "messages.StringField(1) # LeaseReleaseRequestError indicating an error with the request, or", "= messages.EnumField(Backend, 2) class CatalogMachineRetrievalResponse(messages.Message): \"\"\"Represents a response to a", "an error with the # request, or None if there", "batch together. requests = messages.MessageField(LeaseRequest, 1, repeated=True) class LeaseRequestError(messages.Enum): \"\"\"Represents", "1 # Tried to lookup an entry that didn't exist.", "messages.StringField(3, required=True) class BatchedLeaseReleaseResponse(messages.Message): \"\"\"Represents responses to a batched set", "request.\"\"\" # Dimensions instance specifying what sort of machine this", "the machine. backend = messages.EnumField(Backend, 2) class CatalogMachineRetrievalResponse(messages.Message): \"\"\"Represents a", "long. LEASE_TOO_LONG = 11 class LeaseRequestState(messages.Enum): \"\"\"Represents the state of", "indicating lease expiration seconds from epoch in UTC. 
lease_expiration_ts =", "a backend. UNSPECIFIED_BACKEND = 3 # Specified backend didn't match", "in UTC. lease_expiration_ts = messages.IntegerField(8) class CatalogMachineAdditionRequest(messages.Message): \"\"\"Represents a request", "LEASE_TOO_LONG = 11 class LeaseRequestState(messages.Enum): \"\"\"Represents the state of a", "the catalog. HOSTNAME_REUSE = 1 # Tried to lookup an", "a request to poll for instructions given to a machine.\"\"\"", "lease_expiration_ts = messages.IntegerField(7) class BatchedLeaseRequest(messages.Message): \"\"\"Represents a batched set of", "can be found in the LICENSE file. \"\"\"Messages for the", "a machine in the catalog.\"\"\" # Dimensions instance specifying what", "a request to add a machine to the catalog. dimensions.backend", "\"\"\"Represents a batched set of lease release requests.\"\"\" requests =", "no error. error = messages.EnumField(CatalogManipulationRequestError, 1) # CatalogMachineAdditionRequest this response", "= messages.StringField(1) # LeaseRequestError instance indicating an error with the", "intended to be unique. # Reusing a request ID in", "\"\"\"Represents a request to delete a machine in the catalog.\"\"\"", "= messages.StringField(1) # LeaseReleaseRequestError indicating an error with the request,", "manipulation request.\"\"\" # CatalogManipulationRequestError instance indicating an error with the", "request ID in a different request is an error. REQUEST_ID_REUSE", "when lease should expire. 
lease_expiration_ts = messages.IntegerField(7) class BatchedLeaseRequest(messages.Message): \"\"\"Represents", "of lease release requests.\"\"\" responses = messages.MessageField(LeaseReleaseResponse, 1, repeated=True) class", "release requests.\"\"\" requests = messages.MessageField(LeaseReleaseRequest, 1, repeated=True) class LeaseReleaseRequestError(messages.Enum): \"\"\"Represents", "3) class CatalogBatchManipulationResponse(messages.Message): \"\"\"Represents a response to a batched catalog", "class BatchedLeaseReleaseRequest(messages.Message): \"\"\"Represents a batched set of lease release requests.\"\"\"", "to a machine.\"\"\" # Instruction given to the machine. instruction", "in reference to. machine_addition_request = messages.MessageField( CatalogMachineAdditionRequest, 2) # CatalogMachineDeletionRequest", "messages.MessageField(LeaseRequest, 1, repeated=True) class LeaseRequestError(messages.Enum): \"\"\"Represents an error in a", "= messages.MessageField(Policies, 2) # State of the CatalogMachineEntry. state =", "length of the lease in seconds. duration = messages.IntegerField(3) #", "generate the LeaseRequest # referred to by the LeaseReleaseRequest. client_request_id", "LeaseRequest has been fulfilled. FULFILLED = 2 # LeaseRequest has", "already reclaimed. ALREADY_RECLAIMED = 3 # Request couldn't be processed", "send the leased machine. instruction = messages.MessageField(Instruction, 2) class MachineInstructionError(messages.Enum):", "class CatalogMachineRetrievalResponse(messages.Message): \"\"\"Represents a response to a catalog machine retrieval", "LeaseRequest. request_id = messages.StringField(1, required=True) class BatchedLeaseReleaseRequest(messages.Message): \"\"\"Represents a batched", "1) # State of the instruction. state = messages.StringField(2) class", "CatalogMachineDeletionRequest, 3) class CatalogBatchManipulationResponse(messages.Message): \"\"\"Represents a response to a batched", "Desired length of the lease in seconds. 
duration = messages.IntegerField(3)", "4 # Request couldn't be processed in time. DEADLINE_EXCEEDED =", "error = messages.EnumField(MachineInstructionError, 2) class PollRequest(messages.Message): \"\"\"Represents a request to", "# Request ID for the fulfilled LeaseRequest whose machine should", "wildcard-import from protorpc import messages from components.machine_provider.dimensions import * from", "batch together. requests = messages.MessageField( CatalogMachineAdditionRequest, 1, repeated=True) class CatalogMachineDeletionRequest(messages.Message):", "instance specifying what sort of machine to lease. dimensions =", "# Backend the machine belongs to. backend = messages.EnumField(Backend, 2)", "of additional capacity. PENDING = 1 # LeaseRequest has been", "is pending provisioning of additional capacity. PENDING = 1 #", "3 class LeaseResponse(messages.Message): \"\"\"Represents a response to a LeaseRequest.\"\"\" #", "refers to. request_hash = messages.StringField(1) # LeaseRequestError instance indicating an", "= 0 # LeaseRequest is pending provisioning of additional capacity.", "instances to batch together. requests = messages.MessageField(LeaseRequest, 1, repeated=True) class", "# machine being instructed. client_request_id = messages.StringField(1, required=True) # MachineInstructionError", "error with the # request, or None if there is", "# request, or None if there is no error. error", "release requests.\"\"\" responses = messages.MessageField(LeaseReleaseResponse, 1, repeated=True) class MachineInstructionRequest(messages.Message): \"\"\"Represents", "1 # Proposed Cloud Pub/Sub topic was invalid. INVALID_TOPIC =", "Instructions to give the machine once it's been leased. on_lease", "catalog machine retrieval request.\"\"\" # Dimensions instance specifying what sort", "= 9 class CatalogManipulationResponse(messages.Message): \"\"\"Represents a response to a catalog", "catalog. HOSTNAME_REUSE = 1 # Tried to lookup an entry", "are intended to be unique. 
# Reusing a request ID", "LICENSE file. \"\"\"Messages for the Machine Provider API.\"\"\" # pylint:", "# Miscellaneous transient error. TRANSIENT_ERROR = 5 class LeaseReleaseResponse(messages.Message): \"\"\"Represents", "of the lease in seconds. duration = messages.IntegerField(3) # Cloud", "required. backend = messages.EnumField(Backend, 2) class PollResponse(messages.Message): \"\"\"Represents a response", "Pub/Sub topic was invalid. INVALID_TOPIC = 2 # Proposed Cloud", "was invalid. INVALID_PROJECT = 7 # Didn't specify a Cloud", "a batched set of LeaseRequests.\"\"\" # LeaseRequest instances to batch", "the machine. INVALID_INSTRUCTION = 3 class MachineInstructionResponse(messages.Message): \"\"\"Represents a response", "leased machine. LEASED = 9 class CatalogManipulationResponse(messages.Message): \"\"\"Represents a response", "or None if there is no error. error = messages.EnumField(CatalogManipulationRequestError,", "has been fulfilled. FULFILLED = 2 # LeaseRequest has been", "= 3 # Specified backend didn't match the backend originating", "1 # Request ID referred to an unfulfilled request. NOT_FULFILLED", "# if there is no error. error = messages.EnumField(LeaseRequestError, 2)", "LeaseRequestState(messages.Enum): \"\"\"Represents the state of a LeaseRequest.\"\"\" # LeaseRequest has", "\"\"\"Represents the state of a LeaseRequest.\"\"\" # LeaseRequest has been", "Cloud Pub/Sub project was invalid. INVALID_PROJECT = 3 # Didn't", "send an instruction to a leased machine.\"\"\" # Request ID", "CatalogManipulationResponse(messages.Message): \"\"\"Represents a response to a catalog manipulation request.\"\"\" #", "= messages.StringField(4) # Cloud Pub/Sub project name to communicate on", "\"\"\"Represents a request for a lease on a machine.\"\"\" #", "set of CatalogMachineAdditionRequests. dimensions.backend must be specified in each CatalogMachineAdditionRequest.", "the Apache License, Version 2.0 # that can be found", "couldn't be processed in time. 
DEADLINE_EXCEEDED = 5 # Miscellaneous", "topic. UNSPECIFIED_TOPIC = 8 # Attempted to delete a leased", "messages.StringField(1, required=True) # MachineInstructionError indicating an error with the request,", "must be specified. dimensions.hostname must be unique per backend. \"\"\"", "= messages.EnumField(LeaseRequestState, 4) # Hostname of the machine available for", "Cloud Pub/Sub project was invalid. INVALID_PROJECT = 7 # Didn't", "no error. error = messages.EnumField(MachineInstructionError, 2) class PollRequest(messages.Message): \"\"\"Represents a", "Version 2.0 # that can be found in the LICENSE", "an error in a LeaseRequest.\"\"\" # Request IDs are intended", "received by a machine.\"\"\" # Hostname of the machine whose", "be specified. dimensions.hostname must be unique per backend. \"\"\" #", "from components.machine_provider.dimensions import * from components.machine_provider.instructions import * from components.machine_provider.policies", "catalog manipulation request.\"\"\" responses = messages.MessageField( CatalogManipulationResponse, 1, repeated=True) class", "to batch together. requests = messages.MessageField(LeaseRequest, 1, repeated=True) class LeaseRequestError(messages.Enum):", "an error. REQUEST_ID_REUSE = 1 # Proposed Cloud Pub/Sub topic", "dimensions = messages.MessageField(Dimensions, 2, required=True) # Desired length of the", "Pub/Sub project name to communicate on regarding this request. pubsub_project", "code is governed under the Apache License, Version 2.0 #", "there is no error. error = messages.EnumField(LeaseReleaseRequestError, 2) # Request", "= 3 # Request couldn't be processed in time. DEADLINE_EXCEEDED", "a machine.\"\"\" # Hostname of the machine whose instruction to", "catalog manipulation request.\"\"\" # Per backend, hostnames must be unique", "in the future. 
LEASE_EXPIRATION_TS_ERROR = 9 # Neither duration nor", "= messages.IntegerField(6) class BatchedLeaseResponse(messages.Message): \"\"\"Represents a response to a batched", "the LeaseRequest this response refers to. request_hash = messages.StringField(1) #", "the machine belongs to. Generally required. backend = messages.EnumField(Backend, 2)", "BatchedLeaseReleaseRequest(messages.Message): \"\"\"Represents a batched set of lease release requests.\"\"\" requests", "NOT_FULFILLED = 2 # Request ID referred to a fulfilled", "responses = messages.MessageField(LeaseResponse, 1, repeated=True) class LeaseReleaseRequest(messages.Message): \"\"\"Represents a request", "too long. LEASE_TOO_LONG = 11 class LeaseRequestState(messages.Enum): \"\"\"Represents the state", "state = messages.StringField(2) class AckRequest(messages.Message): \"\"\"Represents a request to ack", "batched set of lease release requests.\"\"\" requests = messages.MessageField(LeaseReleaseRequest, 1,", "duration is too long. LEASE_TOO_LONG = 11 class LeaseRequestState(messages.Enum): \"\"\"Represents", "# State of the LeaseRequest. state = messages.EnumField(LeaseRequestState, 4) #", "of the machine whose instructions to retrieve. hostname = messages.StringField(1,", "each CatalogMachineAdditionRequest. dimensions.hostname must be unique per backend. \"\"\" #", "error = messages.EnumField(LeaseRequestError, 2) # Request ID used by the", "LeaseRequest(messages.Message): \"\"\"Represents a request for a lease on a machine.\"\"\"", "instruction to a leased machine.\"\"\" # Request ID for the", "Cloud Pub/Sub topic name to communicate on regarding this request.", "a response to a batched lease request.\"\"\" responses = messages.MessageField(LeaseResponse,", "Specified backend didn't match the backend originating the request. MISMATCHED_BACKEND", "topic was invalid. 
INVALID_TOPIC = 2 # Proposed Cloud Pub/Sub", "a LeaseRequest.\"\"\" # LeaseRequest has been received, but not processed", "ALREADY_RECLAIMED = 2 # Invalid instruction for the machine. INVALID_INSTRUCTION", "of the LeaseRequest. state = messages.EnumField(LeaseRequestState, 4) # Hostname of", "LeaseRequest this response refers to. request_hash = messages.StringField(1) # LeaseReleaseRequestError", "lease_expiration_ts = messages.IntegerField(8) class CatalogMachineAdditionRequest(messages.Message): \"\"\"Represents a request to add", "messages.EnumField(Backend, 2) class PollResponse(messages.Message): \"\"\"Represents a response to a request", "= 8 # Proposed expiration time is not in the", "2) # Request ID used by the client to generate", "to a leased machine.\"\"\" # Request ID for the fulfilled", "which added the machine. backend = messages.EnumField(Backend, 2) class CatalogMachineRetrievalResponse(messages.Message):", "this is. dimensions = messages.MessageField(Dimensions, 1, required=True) class CatalogManipulationRequestError(messages.Enum): \"\"\"Represents", "# State of the CatalogMachineEntry. state = messages.StringField(3) # Cloud", "LeaseResponse(messages.Message): \"\"\"Represents a response to a LeaseRequest.\"\"\" # SHA-1 identifying", "2 # Request ID referred to a fulfilled request whose", "backend. \"\"\" # Dimensions instance specifying what sort of machine", "retrieve. hostname = messages.StringField(1, required=True) # Backend the machine belongs", "the # request, or None if there is no error.", "5 # Miscellaneous transient error. TRANSIENT_ERROR = 6 # Mutually", "referred to non-existent request for this user. NOT_FOUND = 1", "source code is governed under the Apache License, Version 2.0", "to a catalog machine retrieval request.\"\"\" # Dimensions instance specifying", "a Cloud Pub/Sub topic. UNSPECIFIED_TOPIC = 4 # Request couldn't", "ack. 
hostname = messages.StringField(1, required=True) # Backend the machine belongs", "instruction = messages.MessageField(Instruction, 1) # State of the instruction. state", "ID for the fulfilled LeaseRequest whose machine should be #", "must be subscribed to. pubsub_topic = messages.StringField(6) # Project the", "to for instructions. pubsub_subscription = messages.StringField(4) # Project the Cloud", "reference to. machine_deletion_request = messages.MessageField( CatalogMachineDeletionRequest, 3) class CatalogBatchManipulationResponse(messages.Message): \"\"\"Represents", "requests.\"\"\" requests = messages.MessageField(LeaseReleaseRequest, 1, repeated=True) class LeaseReleaseRequestError(messages.Enum): \"\"\"Represents an", "under the Apache License, Version 2.0 # that can be", "project was invalid. INVALID_PROJECT = 7 # Didn't specify a", "# Request IDs are intended to be unique. # Reusing", "ID used by the client to generate the LeaseRequest. client_request_id", "# Tried to lookup an entry that didn't exist. ENTRY_NOT_FOUND", "on a machine.\"\"\" # Per-user unique ID used to deduplicate", "Didn't specify a backend. UNSPECIFIED_BACKEND = 3 # Specified backend", "\"\"\"Represents a batched set of LeaseRequests.\"\"\" # LeaseRequest instances to", "\"\"\"Represents a response to a catalog manipulation request.\"\"\" # CatalogManipulationRequestError", "response to a catalog manipulation request.\"\"\" # CatalogManipulationRequestError instance indicating", "with the request, or None # if there is no", "ID referred to a fulfilled request whose machine was #", "to poll for instructions given to a machine.\"\"\" # Hostname", "1) # Policies governing this machine. policies = messages.MessageField(Policies, 2)", "was # already reclaimed. ALREADY_RECLAIMED = 2 # Invalid instruction", "for the machine. INVALID_INSTRUCTION = 3 class MachineInstructionResponse(messages.Message): \"\"\"Represents a", "in reference to. 
machine_deletion_request = messages.MessageField( CatalogMachineDeletionRequest, 3) class CatalogBatchManipulationResponse(messages.Message):", "machine this is. dimensions = messages.MessageField(Dimensions, 1, required=True) class CatalogManipulationRequestError(messages.Enum):", "# State of the instruction. state = messages.StringField(2) class AckRequest(messages.Message):", "= 8 # Attempted to delete a leased machine. LEASED", "\"\"\" # Dimensions instance specifying what sort of machine this", "\"\"\"Represents a request to add a machine to the catalog.", "request. NOT_FULFILLED = 1 # Request ID referred to a", "machine whose instruction to ack. hostname = messages.StringField(1, required=True) #", "to retrieve. hostname = messages.StringField(1, required=True) # Backend which added", "= 4 # Request couldn't be processed in time. DEADLINE_EXCEEDED", "dimensions.hostname must be unique per backend. \"\"\" # CatalogMachineAdditionRequest instances", "a catalog manipulation request.\"\"\" # Per backend, hostnames must be", "class MachineInstructionResponse(messages.Message): \"\"\"Represents a response to a MachineInstructionRequest.\"\"\" # Request", "class CatalogMachineAdditionRequest(messages.Message): \"\"\"Represents a request to add a machine to", "UNSPECIFIED_TOPIC = 8 # Attempted to delete a leased machine.", "Request ID referred to an unfulfilled request. NOT_FULFILLED = 1", "# Specified backend didn't match the backend originating the request.", "used to deduplicate requests. request_id = messages.StringField(1, required=True) # Dimensions", "components.machine_provider.policies import * class CatalogMachineRetrievalRequest(messages.Message): \"\"\"Represents a request to retrieve", "# Neither duration nor lease_expiration_ts were specified. LEASE_LENGTH_UNSPECIFIED = 10", "# Cloud Pub/Sub topic the machine must be subscribed to.", "dimensions.backend must be specified. 
dimensions.hostname must be unique per backend.", "backend = messages.EnumField(Backend, 2) class CatalogMachineRetrievalResponse(messages.Message): \"\"\"Represents a response to", "machine-specific configuration. policies = messages.MessageField(Policies, 2, required=True) class CatalogMachineBatchAdditionRequest(messages.Message): \"\"\"Represents", "= messages.IntegerField(7) class BatchedLeaseRequest(messages.Message): \"\"\"Represents a batched set of LeaseRequests.\"\"\"", "\"\"\"Represents a response to a MachineInstructionRequest.\"\"\" # Request ID used", "specify a hostname. UNSPECIFIED_HOSTNAME = 5 # Proposed Cloud Pub/Sub", "INVALID_PROJECT = 7 # Didn't specify a Cloud Pub/Sub topic.", "machine to lease. dimensions = messages.MessageField(Dimensions, 2, required=True) # Desired", "if there is no error. error = messages.EnumField(LeaseReleaseRequestError, 2) #", "= messages.StringField(3) # Cloud Pub/Sub subscription the machine must listen", "subscription exists in. pubsub_subscription_project = messages.StringField(5) # Cloud Pub/Sub topic", "# LeaseRequest instances to batch together. requests = messages.MessageField(LeaseRequest, 1,", "reclaimed. ALREADY_RECLAIMED = 2 # Invalid instruction for the machine.", "instruction for the machine. INVALID_INSTRUCTION = 3 class MachineInstructionResponse(messages.Message): \"\"\"Represents", "a response to a catalog manipulation request.\"\"\" # CatalogManipulationRequestError instance", "this machine. policies = messages.MessageField(Policies, 2) # State of the", "Pub/Sub topic exists in. pubsub_topic_project = messages.StringField(7) # Timestamp indicating", "from epoch when lease should expire. lease_expiration_ts = messages.IntegerField(7) class", "to communicate on regarding this request. pubsub_topic = messages.StringField(4) #", "# CatalogMachineAdditionRequest instances to batch together. requests = messages.MessageField( CatalogMachineAdditionRequest,", "if there is no error. 
error = messages.EnumField(CatalogManipulationRequestError, 1) #", "requests = messages.MessageField(LeaseReleaseRequest, 1, repeated=True) class LeaseReleaseRequestError(messages.Enum): \"\"\"Represents an error", "Dimensions instance specifying what sort of machine to lease. dimensions", "requests.\"\"\" responses = messages.MessageField(LeaseReleaseResponse, 1, repeated=True) class MachineInstructionRequest(messages.Message): \"\"\"Represents a", "exclusive duration and lease_expiration_ts both specified. MUTUAL_EXCLUSION_ERROR = 7 #", "2 # Didn't specify a backend. UNSPECIFIED_BACKEND = 3 #", "UTC seconds from epoch when lease should expire. lease_expiration_ts =", "# Proposed Cloud Pub/Sub topic was invalid. INVALID_TOPIC = 6", "Request ID referred to non-existent request for this user. NOT_FOUND", "# Proposed expiration time is not in the future. LEASE_EXPIRATION_TS_ERROR", "Authors. All rights reserved. # Use of this source code", "client_request_id = messages.StringField(1, required=True) # MachineInstructionError indicating an error with", "State of the instruction. state = messages.StringField(2) class AckRequest(messages.Message): \"\"\"Represents", "the # machine being instructed. client_request_id = messages.StringField(1, required=True) #", "# Copyright 2015 The LUCI Authors. All rights reserved. #", "must be unique per backend. \"\"\" # CatalogMachineAdditionRequest instances to", "machine.\"\"\" # Request ID for the fulfilled LeaseRequest whose machine", "CatalogMachineBatchAdditionRequest(messages.Message): \"\"\"Represents a batched set of CatalogMachineAdditionRequests. dimensions.backend must be", "Request IDs are intended to be unique. # Reusing a", "unique per backend. \"\"\" # Dimensions instance specifying what sort", "to by the LeaseReleaseRequest. client_request_id = messages.StringField(3, required=True) class BatchedLeaseReleaseResponse(messages.Message):", "expire. 
lease_expiration_ts = messages.IntegerField(7) class BatchedLeaseRequest(messages.Message): \"\"\"Represents a batched set", "# Proposed Cloud Pub/Sub topic was invalid. INVALID_TOPIC = 2", "the LeaseRequest. client_request_id = messages.StringField(3, required=True) # State of the", "# CatalogMachineDeletionRequest this response is in reference to. machine_deletion_request =", "Timestamp indicating lease expiration seconds from epoch in UTC. lease_expiration_ts", "# LeaseReleaseRequestError indicating an error with the request, or None", "# Didn't specify a backend. UNSPECIFIED_BACKEND = 3 # Specified", "a response to a catalog machine retrieval request.\"\"\" # Dimensions", "2) class PollRequest(messages.Message): \"\"\"Represents a request to poll for instructions", "what sort of machine this is. dimensions = messages.MessageField(Dimensions, 1)", "the client to generate the LeaseRequest # referred to by", "ID referred to an unfulfilled request. NOT_FULFILLED = 1 #", "2) class CatalogMachineRetrievalResponse(messages.Message): \"\"\"Represents a response to a catalog machine", "messages.IntegerField(3) # Cloud Pub/Sub topic name to communicate on regarding", "class PollRequest(messages.Message): \"\"\"Represents a request to poll for instructions given", "Cloud Pub/Sub subscription exists in. pubsub_subscription_project = messages.StringField(5) # Cloud", "= 4 # Miscellaneous transient error. TRANSIENT_ERROR = 5 class", "response to a MachineInstructionRequest.\"\"\" # Request ID used by the", "# Didn't specify a Cloud Pub/Sub topic. UNSPECIFIED_TOPIC = 4", "Pub/Sub topic was invalid. INVALID_TOPIC = 6 # Proposed Cloud", "should be # instructed. request_id = messages.StringField(1, required=True) # Instruction", "given to a machine.\"\"\" # Instruction given to the machine.", "CatalogMachineAdditionRequest this response is in reference to. 
machine_addition_request = messages.MessageField(", "a lease on a machine.\"\"\" # Per-user unique ID used", "to communicate on regarding this request. pubsub_project = messages.StringField(5) #", "The LUCI Authors. All rights reserved. # Use of this", "messages from components.machine_provider.dimensions import * from components.machine_provider.instructions import * from", "protorpc import messages from components.machine_provider.dimensions import * from components.machine_provider.instructions import", "messages.StringField(4) # Project the Cloud Pub/Sub subscription exists in. pubsub_subscription_project", "ID used to identify the LeaseRequest. request_id = messages.StringField(1, required=True)", "INVALID_TOPIC = 2 # Proposed Cloud Pub/Sub project was invalid.", "DEADLINE_EXCEEDED = 4 # Miscellaneous transient error. TRANSIENT_ERROR = 5", "4 # Didn't specify a hostname. UNSPECIFIED_HOSTNAME = 5 #", "non-existent request for this user. NOT_FOUND = 1 # Request", "= messages.MessageField(Dimensions, 1, required=True) # Policies instance specifying machine-specific configuration.", "delete a leased machine. LEASED = 9 class CatalogManipulationResponse(messages.Message): \"\"\"Represents", "import * class CatalogMachineRetrievalRequest(messages.Message): \"\"\"Represents a request to retrieve a", "state = messages.StringField(3) # Cloud Pub/Sub subscription the machine must", "catalog.\"\"\" # Hostname of the machine to retrieve. hostname =", "Proposed expiration time is not in the future. LEASE_EXPIRATION_TS_ERROR =", "Pub/Sub project was invalid. INVALID_PROJECT = 3 # Didn't specify", "2.0 # that can be found in the LICENSE file.", "messages.EnumField(MachineInstructionError, 2) class PollRequest(messages.Message): \"\"\"Represents a request to poll for", "lookup an entry that didn't exist. 
ENTRY_NOT_FOUND = 2 #", "from components.machine_provider.policies import * class CatalogMachineRetrievalRequest(messages.Message): \"\"\"Represents a request to", "error in a LeaseRequest.\"\"\" # Request IDs are intended to", "Didn't specify a hostname. UNSPECIFIED_HOSTNAME = 5 # Proposed Cloud", "CatalogManipulationRequestError(messages.Enum): \"\"\"Represents an error in a catalog manipulation request.\"\"\" #", "Pub/Sub topic. UNSPECIFIED_TOPIC = 4 # Request couldn't be processed", "required=True) class CatalogManipulationRequestError(messages.Enum): \"\"\"Represents an error in a catalog manipulation", "from epoch in UTC. lease_expiration_ts = messages.IntegerField(6) class BatchedLeaseResponse(messages.Message): \"\"\"Represents", "error. error = messages.EnumField(CatalogManipulationRequestError, 1) # CatalogMachineAdditionRequest this response is", "# Miscellaneous transient error. TRANSIENT_ERROR = 6 # Mutually exclusive", "an instruction to a leased machine.\"\"\" # Request ID for", "= messages.StringField(1, required=True) # Dimensions instance specifying what sort of", "a request to ack an instruction received by a machine.\"\"\"", "class LeaseReleaseRequest(messages.Message): \"\"\"Represents a request to voluntarily cancel a LeaseRequest.\"\"\"", "LeaseRequest. state = messages.EnumField(LeaseRequestState, 4) # Hostname of the machine", "messages.MessageField( CatalogManipulationResponse, 1, repeated=True) class LeaseRequest(messages.Message): \"\"\"Represents a request for", "LeaseRequest.\"\"\" # SHA-1 identifying the LeaseRequest this response refers to.", "None if there is no error. error = messages.EnumField(CatalogManipulationRequestError, 1)", "= 1 # Request ID referred to a fulfilled request", "to a machine.\"\"\" # Hostname of the machine whose instructions", "the LeaseRequest # referred to by the LeaseReleaseRequest. client_request_id =", "of the instruction. 
state = messages.StringField(2) class AckRequest(messages.Message): \"\"\"Represents a", "messages.MessageField(Policies, 2) # State of the CatalogMachineEntry. state = messages.StringField(3)", "subscribed to. pubsub_topic = messages.StringField(6) # Project the Cloud Pub/Sub", "seconds. duration = messages.IntegerField(3) # Cloud Pub/Sub topic name to", "machine to the catalog. dimensions.backend must be specified. dimensions.hostname must", "referred to a fulfilled request whose machine was # already", "catalog.\"\"\" # Dimensions instance specifying what sort of machine this", "error. TRANSIENT_ERROR = 6 # Mutually exclusive duration and lease_expiration_ts", "the state of a LeaseRequest.\"\"\" # LeaseRequest has been received,", "Dimensions instance specifying what sort of machine this is. dimensions", "be unique per backend. \"\"\" # Dimensions instance specifying what", "# CatalogManipulationRequestError instance indicating an error with the # request,", "state of a LeaseRequest.\"\"\" # LeaseRequest has been received, but", "in a MachineInstructionRequest.\"\"\" # Request ID referred to an unfulfilled", "should expire. lease_expiration_ts = messages.IntegerField(7) class BatchedLeaseRequest(messages.Message): \"\"\"Represents a batched", "machine. instruction = messages.MessageField(Instruction, 2) class MachineInstructionError(messages.Enum): \"\"\"Represents an error", "= messages.StringField(2) class AckRequest(messages.Message): \"\"\"Represents a request to ack an", "received, but not processed yet. UNTRIAGED = 0 # LeaseRequest", "set of lease release requests.\"\"\" responses = messages.MessageField(LeaseReleaseResponse, 1, repeated=True)", "8 # Attempted to delete a leased machine. LEASED =", "LUCI Authors. All rights reserved. # Use of this source", "PENDING = 1 # LeaseRequest has been fulfilled. FULFILLED =", "regarding this request. 
pubsub_topic = messages.StringField(4) # Cloud Pub/Sub project", "responses = messages.MessageField( CatalogManipulationResponse, 1, repeated=True) class LeaseRequest(messages.Message): \"\"\"Represents a", "class LeaseRequestError(messages.Enum): \"\"\"Represents an error in a LeaseRequest.\"\"\" # Request", "messages.EnumField(CatalogManipulationRequestError, 1) # CatalogMachineAdditionRequest this response is in reference to.", "5 class LeaseReleaseResponse(messages.Message): \"\"\"Represents a response to a LeaseReleaseRequest.\"\"\" #", "per backend. \"\"\" # Dimensions instance specifying what sort of", "Proposed Cloud Pub/Sub topic was invalid. INVALID_TOPIC = 2 #", "class CatalogManipulationResponse(messages.Message): \"\"\"Represents a response to a catalog manipulation request.\"\"\"", "# LeaseRequest has been received, but not processed yet. UNTRIAGED", "to add a machine to the catalog. dimensions.backend must be", "Machine Provider API.\"\"\" # pylint: disable=unused-wildcard-import, wildcard-import from protorpc import", "to deduplicate requests. request_id = messages.StringField(1, required=True) # Dimensions instance", "LEASE_LENGTH_UNSPECIFIED = 10 # Requested lease duration is too long.", "\"\"\"Represents a request to send an instruction to a leased", "an unfulfilled request. NOT_FULFILLED = 2 # Request ID referred", "messages.MessageField(LeaseReleaseRequest, 1, repeated=True) class LeaseReleaseRequestError(messages.Enum): \"\"\"Represents an error in a", "# CatalogMachineAdditionRequest this response is in reference to. machine_addition_request =", "class BatchedLeaseResponse(messages.Message): \"\"\"Represents a response to a batched lease request.\"\"\"", "a Cloud Pub/Sub topic. UNSPECIFIED_TOPIC = 8 # Attempted to", "unique per backend. 
\"\"\" # CatalogMachineAdditionRequest instances to batch together.", "request for a lease on a machine.\"\"\" # Per-user unique", "class BatchedLeaseReleaseResponse(messages.Message): \"\"\"Represents responses to a batched set of lease", "All rights reserved. # Use of this source code is", "4 # Miscellaneous transient error. TRANSIENT_ERROR = 5 class LeaseReleaseResponse(messages.Message):", "client to generate the LeaseRequest. client_request_id = messages.StringField(3, required=True) #", "BatchedLeaseRequest(messages.Message): \"\"\"Represents a batched set of LeaseRequests.\"\"\" # LeaseRequest instances", "Pub/Sub project was invalid. INVALID_PROJECT = 7 # Didn't specify", "to a fulfilled request whose machine was # already reclaimed.", "response to a catalog machine retrieval request.\"\"\" # Dimensions instance", "response to a batched lease request.\"\"\" responses = messages.MessageField(LeaseResponse, 1,", "9 class CatalogManipulationResponse(messages.Message): \"\"\"Represents a response to a catalog manipulation", "response to a LeaseReleaseRequest.\"\"\" # SHA-1 identifying the LeaseRequest this", "= 1 # Proposed Cloud Pub/Sub topic was invalid. INVALID_TOPIC", "machine once it's been leased. on_lease = messages.MessageField(Instruction, 6) #", "required=True) # MachineInstructionError indicating an error with the request, or", "indicating an error with the request, or None # if", "in the catalog.\"\"\" # Dimensions instance specifying what sort of", "messages.StringField(1, required=True) # Backend the machine belongs to. Generally required.", "= 1 # Tried to lookup an entry that didn't", "there is no error. 
error = messages.EnumField(LeaseRequestError, 2) # Request", "to a batched set of lease release requests.\"\"\" responses =", "* class CatalogMachineRetrievalRequest(messages.Message): \"\"\"Represents a request to retrieve a machine", "messages.MessageField(Instruction, 6) # UTC seconds from epoch when lease should", "a request ID in a different request is an error.", "whose machine was # already reclaimed. ALREADY_RECLAIMED = 2 #", "LeaseReleaseRequest.\"\"\" # SHA-1 identifying the LeaseRequest this response refers to.", "time. DEADLINE_EXCEEDED = 4 # Miscellaneous transient error. TRANSIENT_ERROR =", "request_hash = messages.StringField(1) # LeaseRequestError instance indicating an error with", "for a lease on a machine.\"\"\" # Per-user unique ID", "6 # Mutually exclusive duration and lease_expiration_ts both specified. MUTUAL_EXCLUSION_ERROR", "required=True) # Desired length of the lease in seconds. duration", "the request. MISMATCHED_BACKEND = 4 # Didn't specify a hostname.", "= messages.StringField(7) # Timestamp indicating lease expiration seconds from epoch", "lease release requests.\"\"\" responses = messages.MessageField(LeaseReleaseResponse, 1, repeated=True) class MachineInstructionRequest(messages.Message):", "NONPOSITIVE_DEADLINE = 8 # Proposed expiration time is not in", "to a batched lease request.\"\"\" responses = messages.MessageField(LeaseResponse, 1, repeated=True)", "messages.MessageField(Dimensions, 2, required=True) # Desired length of the lease in", "request for instructions given to a machine.\"\"\" # Instruction given", "provisioning of additional capacity. PENDING = 1 # LeaseRequest has", "reference to. machine_addition_request = messages.MessageField( CatalogMachineAdditionRequest, 2) # CatalogMachineDeletionRequest this", "Tried to lookup an entry that didn't exist. ENTRY_NOT_FOUND =", "cancel a LeaseRequest.\"\"\" # Per-user unique ID used to identify", "machine must be subscribed to. 
pubsub_topic = messages.StringField(6) # Project", "= messages.StringField(1, required=True) # Backend the machine belongs to. backend", "class LeaseRequest(messages.Message): \"\"\"Represents a request for a lease on a", "machine.\"\"\" # Per-user unique ID used to deduplicate requests. request_id", "to delete a leased machine. LEASED = 9 class CatalogManipulationResponse(messages.Message):", "# UTC seconds from epoch when lease should expire. lease_expiration_ts", "leased machine.\"\"\" # Request ID for the fulfilled LeaseRequest whose", "Pub/Sub subscription exists in. pubsub_subscription_project = messages.StringField(5) # Cloud Pub/Sub", "7 # Didn't specify a Cloud Pub/Sub topic. UNSPECIFIED_TOPIC =", "Cloud Pub/Sub topic the machine must be subscribed to. pubsub_topic", "reserved. # Use of this source code is governed under", "# Mutually exclusive duration and lease_expiration_ts both specified. MUTUAL_EXCLUSION_ERROR =", "required=True) # Backend the machine belongs to. backend = messages.EnumField(Backend,", "# Per-user unique ID used to identify the LeaseRequest. request_id", "a response to a LeaseReleaseRequest.\"\"\" # SHA-1 identifying the LeaseRequest", "# Hostname of the machine available for this request. hostname", "lease_expiration_ts were specified. LEASE_LENGTH_UNSPECIFIED = 10 # Requested lease duration", "\"\"\"Represents a batched set of CatalogMachineAdditionRequests. dimensions.backend must be specified", "messages.StringField(4) # Cloud Pub/Sub project name to communicate on regarding", "governed under the Apache License, Version 2.0 # that can", "by a machine.\"\"\" # Hostname of the machine whose instruction", "API.\"\"\" # pylint: disable=unused-wildcard-import, wildcard-import from protorpc import messages from", "1, required=True) # Policies instance specifying machine-specific configuration. policies =", "indicating an error with the # request, or None if", "Attempted to delete a leased machine. 
LEASED = 9 class", "NOT_FOUND = 1 # Request ID referred to an unfulfilled", "# Invalid instruction for the machine. INVALID_INSTRUCTION = 3 class", "to identify the LeaseRequest. request_id = messages.StringField(1, required=True) class BatchedLeaseReleaseRequest(messages.Message):", "SHA-1 identifying the LeaseRequest this response refers to. request_hash =", "Hostname of the machine whose instructions to retrieve. hostname =", "machine.\"\"\" # Instruction given to the machine. instruction = messages.MessageField(Instruction,", "request to voluntarily cancel a LeaseRequest.\"\"\" # Per-user unique ID", "LeaseRequest # referred to by the LeaseReleaseRequest. client_request_id = messages.StringField(3,", "a machine.\"\"\" # Hostname of the machine whose instructions to", "Backend the machine belongs to. Generally required. backend = messages.EnumField(Backend,", "specifying what sort of machine this is. dimensions = messages.MessageField(Dimensions,", "Invalid instruction for the machine. INVALID_INSTRUCTION = 3 class MachineInstructionResponse(messages.Message):", "of machine this is. dimensions = messages.MessageField(Dimensions, 1) # Policies", "name to communicate on regarding this request. pubsub_topic = messages.StringField(4)", "a fulfilled request whose machine was # already reclaimed. ALREADY_RECLAIMED", "negative. NONPOSITIVE_DEADLINE = 8 # Proposed expiration time is not", "messages.MessageField(LeaseReleaseResponse, 1, repeated=True) class MachineInstructionRequest(messages.Message): \"\"\"Represents a request to send", "machine. INVALID_INSTRUCTION = 3 class MachineInstructionResponse(messages.Message): \"\"\"Represents a response to", "machine_addition_request = messages.MessageField( CatalogMachineAdditionRequest, 2) # CatalogMachineDeletionRequest this response is", "unique ID used to deduplicate requests. request_id = messages.StringField(1, required=True)", "user. 
NOT_FOUND = 1 # Request ID referred to an", "manipulation request.\"\"\" # Per backend, hostnames must be unique in", "entry that didn't exist. ENTRY_NOT_FOUND = 2 # Didn't specify", "# Hostname of the machine to retrieve. hostname = messages.StringField(1,", "be unique in the catalog. HOSTNAME_REUSE = 1 # Tried", "hostname = messages.StringField(1, required=True) # Backend the machine belongs to.", "ID used by the client to generate the LeaseRequest for", "an error in a catalog manipulation request.\"\"\" # Per backend,", "the instruction. state = messages.StringField(2) class AckRequest(messages.Message): \"\"\"Represents a request", "lease request.\"\"\" responses = messages.MessageField(LeaseResponse, 1, repeated=True) class LeaseReleaseRequest(messages.Message): \"\"\"Represents", "LeaseRequest is pending provisioning of additional capacity. PENDING = 1", "Request ID used by the client to generate the LeaseRequest", "Project the Cloud Pub/Sub topic exists in. pubsub_topic_project = messages.StringField(7)", "UTC. lease_expiration_ts = messages.IntegerField(8) class CatalogMachineAdditionRequest(messages.Message): \"\"\"Represents a request to", "LeaseRequest whose machine should be # instructed. request_id = messages.StringField(1,", "delete a machine in the catalog.\"\"\" # Dimensions instance specifying", "= messages.StringField(1, required=True) # MachineInstructionError indicating an error with the", "has been denied. DENIED = 3 class LeaseResponse(messages.Message): \"\"\"Represents a", "in. pubsub_topic_project = messages.StringField(7) # Timestamp indicating lease expiration seconds", "to. machine_addition_request = messages.MessageField( CatalogMachineAdditionRequest, 2) # CatalogMachineDeletionRequest this response", "INVALID_TOPIC = 6 # Proposed Cloud Pub/Sub project was invalid.", "to non-existent request for this user. NOT_FOUND = 1 #", "is no error. error = messages.EnumField(LeaseRequestError, 2) # Request ID", "or None # if there is no error. 
error =", "the machine whose instruction to ack. hostname = messages.StringField(1, required=True)", "is. dimensions = messages.MessageField(Dimensions, 1) # Policies governing this machine.", "2 # Proposed Cloud Pub/Sub project was invalid. INVALID_PROJECT =", "1, repeated=True) class LeaseRequestError(messages.Enum): \"\"\"Represents an error in a LeaseRequest.\"\"\"", "been fulfilled. FULFILLED = 2 # LeaseRequest has been denied.", "\"\"\"Represents an error in a LeaseReleaseRequest.\"\"\" # Request ID referred", "in a catalog manipulation request.\"\"\" # Per backend, hostnames must", "epoch in UTC. lease_expiration_ts = messages.IntegerField(6) class BatchedLeaseResponse(messages.Message): \"\"\"Represents a", "this is. dimensions = messages.MessageField(Dimensions, 1, required=True) # Policies instance", "CatalogMachineAdditionRequest instances to batch together. requests = messages.MessageField( CatalogMachineAdditionRequest, 1,", "capacity. PENDING = 1 # LeaseRequest has been fulfilled. FULFILLED", "INVALID_PROJECT = 3 # Didn't specify a Cloud Pub/Sub topic.", "refers to. request_hash = messages.StringField(1) # LeaseReleaseRequestError indicating an error", "= 11 class LeaseRequestState(messages.Enum): \"\"\"Represents the state of a LeaseRequest.\"\"\"", "expiration seconds from epoch in UTC. lease_expiration_ts = messages.IntegerField(8) class", "import * from components.machine_provider.policies import * class CatalogMachineRetrievalRequest(messages.Message): \"\"\"Represents a", "it's been leased. on_lease = messages.MessageField(Instruction, 6) # UTC seconds", "Per-user unique ID used to identify the LeaseRequest. request_id =", "\"\"\"Represents a response to a request for instructions given to", "transient error. TRANSIENT_ERROR = 5 class LeaseReleaseResponse(messages.Message): \"\"\"Represents a response", "the machine. 
instruction = messages.MessageField(Instruction, 1) # State of the", "= messages.MessageField( CatalogMachineAdditionRequest, 2) # CatalogMachineDeletionRequest this response is in", "additional capacity. PENDING = 1 # LeaseRequest has been fulfilled.", "# Cloud Pub/Sub project name to communicate on regarding this", "exists in. pubsub_subscription_project = messages.StringField(5) # Cloud Pub/Sub topic the", "there is no error. error = messages.EnumField(MachineInstructionError, 2) class PollRequest(messages.Message):", "MachineInstructionError(messages.Enum): \"\"\"Represents an error in a MachineInstructionRequest.\"\"\" # Request ID", "for instructions given to a machine.\"\"\" # Instruction given to", "= messages.StringField(5) # Timestamp indicating lease expiration seconds from epoch", "unique. # Reusing a request ID in a different request", "dimensions.hostname must be unique per backend. \"\"\" # Dimensions instance", "1, required=True) class CatalogManipulationRequestError(messages.Enum): \"\"\"Represents an error in a catalog", "an error with the request, or None # if there", "the lease in seconds. duration = messages.IntegerField(3) # Cloud Pub/Sub", "has been received, but not processed yet. UNTRIAGED = 0", "specified. MUTUAL_EXCLUSION_ERROR = 7 # Proposed duration was zero or", "for the fulfilled LeaseRequest whose machine should be # instructed.", "to be unique. # Reusing a request ID in a", "didn't exist. ENTRY_NOT_FOUND = 2 # Didn't specify a backend.", "reclaimed. ALREADY_RECLAIMED = 3 # Request couldn't be processed in", "whose instruction to ack. hostname = messages.StringField(1, required=True) # Backend", "Proposed Cloud Pub/Sub topic was invalid. INVALID_TOPIC = 6 #", "by the client to generate the LeaseRequest for the #", "this request. hostname = messages.StringField(5) # Timestamp indicating lease expiration", "request. 
hostname = messages.StringField(5) # Timestamp indicating lease expiration seconds", "client_request_id = messages.StringField(3, required=True) # State of the LeaseRequest. state", "catalog manipulation request.\"\"\" # CatalogManipulationRequestError instance indicating an error with", "Requested lease duration is too long. LEASE_TOO_LONG = 11 class", "11 class LeaseRequestState(messages.Enum): \"\"\"Represents the state of a LeaseRequest.\"\"\" #", "been denied. DENIED = 3 class LeaseResponse(messages.Message): \"\"\"Represents a response", "# Proposed duration was zero or negative. NONPOSITIVE_DEADLINE = 8", "processed yet. UNTRIAGED = 0 # LeaseRequest is pending provisioning", "specified in each CatalogMachineAdditionRequest. dimensions.hostname must be unique per backend.", "must be specified in each CatalogMachineAdditionRequest. dimensions.hostname must be unique", "Instruction given to the machine. instruction = messages.MessageField(Instruction, 1) #", "= 5 # Miscellaneous transient error. TRANSIENT_ERROR = 6 #", "project was invalid. INVALID_PROJECT = 3 # Didn't specify a", "messages.StringField(7) # Timestamp indicating lease expiration seconds from epoch in", "# Request couldn't be processed in time. DEADLINE_EXCEEDED = 5", "a batched set of lease release requests.\"\"\" requests = messages.MessageField(LeaseReleaseRequest,", "\"\"\"Represents a request to retrieve a machine from the catalog.\"\"\"", "requests = messages.MessageField(LeaseRequest, 1, repeated=True) class LeaseRequestError(messages.Enum): \"\"\"Represents an error", "in a different request is an error. REQUEST_ID_REUSE = 1", "Instruction to send the leased machine. instruction = messages.MessageField(Instruction, 2)", "request to delete a machine in the catalog.\"\"\" # Dimensions", "instance specifying what sort of machine this is. dimensions =", "Didn't specify a Cloud Pub/Sub topic. 
UNSPECIFIED_TOPIC = 4 #", "components.machine_provider.dimensions import * from components.machine_provider.instructions import * from components.machine_provider.policies import", "or negative. NONPOSITIVE_DEADLINE = 8 # Proposed expiration time is", "= messages.MessageField(LeaseReleaseRequest, 1, repeated=True) class LeaseReleaseRequestError(messages.Enum): \"\"\"Represents an error in", "error in a catalog manipulation request.\"\"\" # Per backend, hostnames", "= messages.StringField(1, required=True) # Backend which added the machine. backend", "is in reference to. machine_deletion_request = messages.MessageField( CatalogMachineDeletionRequest, 3) class", "name to communicate on regarding this request. pubsub_project = messages.StringField(5)", "nor lease_expiration_ts were specified. LEASE_LENGTH_UNSPECIFIED = 10 # Requested lease", "messages.StringField(1, required=True) # Instruction to send the leased machine. instruction", "class CatalogMachineRetrievalRequest(messages.Message): \"\"\"Represents a request to retrieve a machine from", "Hostname of the machine to retrieve. hostname = messages.StringField(1, required=True)", "the machine available for this request. hostname = messages.StringField(5) #", "in time. DEADLINE_EXCEEDED = 4 # Miscellaneous transient error. TRANSIENT_ERROR", "the machine must listen to for instructions. pubsub_subscription = messages.StringField(4)", "MachineInstructionError indicating an error with the request, or None #", "\"\"\"Represents a response to a LeaseReleaseRequest.\"\"\" # SHA-1 identifying the", "required=True) class BatchedLeaseReleaseResponse(messages.Message): \"\"\"Represents responses to a batched set of", "by the client to generate the LeaseRequest. client_request_id = messages.StringField(3,", "the catalog. dimensions.backend must be specified. dimensions.hostname must be unique", "denied. 
DENIED = 3 class LeaseResponse(messages.Message): \"\"\"Represents a response to", "to retrieve a machine from the catalog.\"\"\" # Hostname of", "messages.StringField(5) # Instructions to give the machine once it's been", "# LeaseRequest is pending provisioning of additional capacity. PENDING =", "added the machine. backend = messages.EnumField(Backend, 2) class CatalogMachineRetrievalResponse(messages.Message): \"\"\"Represents", "# LeaseRequest has been denied. DENIED = 3 class LeaseResponse(messages.Message):", "messages.MessageField(Dimensions, 1, required=True) class CatalogManipulationRequestError(messages.Enum): \"\"\"Represents an error in a", "response to a request for instructions given to a machine.\"\"\"", "1, repeated=True) class MachineInstructionRequest(messages.Message): \"\"\"Represents a request to send an", "the Machine Provider API.\"\"\" # pylint: disable=unused-wildcard-import, wildcard-import from protorpc", "response to a LeaseRequest.\"\"\" # SHA-1 identifying the LeaseRequest this", "MachineInstructionRequest.\"\"\" # Request ID referred to an unfulfilled request. NOT_FULFILLED", "duration was zero or negative. NONPOSITIVE_DEADLINE = 8 # Proposed", "specify a Cloud Pub/Sub topic. UNSPECIFIED_TOPIC = 4 # Request", "instances to batch together. requests = messages.MessageField( CatalogMachineAdditionRequest, 1, repeated=True)", "# LeaseRequest has been fulfilled. FULFILLED = 2 # LeaseRequest", "used by the client to generate the LeaseRequest. client_request_id =", "be processed in time. DEADLINE_EXCEEDED = 5 # Miscellaneous transient", "= messages.StringField(1, required=True) # Instruction to send the leased machine.", "= messages.MessageField(Policies, 2, required=True) class CatalogMachineBatchAdditionRequest(messages.Message): \"\"\"Represents a batched set", "leased machine. 
instruction = messages.MessageField(Instruction, 2) class MachineInstructionError(messages.Enum): \"\"\"Represents an", "machine.\"\"\" # Hostname of the machine whose instruction to ack.", "a batched lease request.\"\"\" responses = messages.MessageField(LeaseResponse, 1, repeated=True) class", "batched set of LeaseRequests.\"\"\" # LeaseRequest instances to batch together.", "exist. ENTRY_NOT_FOUND = 2 # Didn't specify a backend. UNSPECIFIED_BACKEND", "a leased machine. LEASED = 9 class CatalogManipulationResponse(messages.Message): \"\"\"Represents a", "# Policies instance specifying machine-specific configuration. policies = messages.MessageField(Policies, 2,", "batched set of CatalogMachineAdditionRequests. dimensions.backend must be specified in each", "on_lease = messages.MessageField(Instruction, 6) # UTC seconds from epoch when", "sort of machine this is. dimensions = messages.MessageField(Dimensions, 1, required=True)", "but not processed yet. UNTRIAGED = 0 # LeaseRequest is", "dimensions = messages.MessageField(Dimensions, 1) # Policies governing this machine. policies", "LeaseRequest.\"\"\" # LeaseRequest has been received, but not processed yet.", "invalid. INVALID_TOPIC = 2 # Proposed Cloud Pub/Sub project was", "a request to delete a machine in the catalog.\"\"\" #", "2) class PollResponse(messages.Message): \"\"\"Represents a response to a request for", "for instructions given to a machine.\"\"\" # Hostname of the", "response to a batched catalog manipulation request.\"\"\" responses = messages.MessageField(", "= messages.EnumField(Backend, 2) class PollResponse(messages.Message): \"\"\"Represents a response to a", "of LeaseRequests.\"\"\" # LeaseRequest instances to batch together. requests =", "different request is an error. REQUEST_ID_REUSE = 1 # Proposed", "2) # State of the CatalogMachineEntry. state = messages.StringField(3) #", "retrieve a machine from the catalog.\"\"\" # Hostname of the", "machine this is. 
dimensions = messages.MessageField(Dimensions, 1) # Policies governing", "to. machine_deletion_request = messages.MessageField( CatalogMachineDeletionRequest, 3) class CatalogBatchManipulationResponse(messages.Message): \"\"\"Represents a", "a MachineInstructionRequest.\"\"\" # Request ID used by the client to", "Cloud Pub/Sub topic. UNSPECIFIED_TOPIC = 8 # Attempted to delete", "been received, but not processed yet. UNTRIAGED = 0 #", "import messages from components.machine_provider.dimensions import * from components.machine_provider.instructions import *", "error. REQUEST_ID_REUSE = 1 # Proposed Cloud Pub/Sub topic was", "class CatalogMachineBatchAdditionRequest(messages.Message): \"\"\"Represents a batched set of CatalogMachineAdditionRequests. dimensions.backend must", "# Project the Cloud Pub/Sub topic exists in. pubsub_topic_project =", "LeaseRequest.\"\"\" # Per-user unique ID used to identify the LeaseRequest.", "class LeaseReleaseResponse(messages.Message): \"\"\"Represents a response to a LeaseReleaseRequest.\"\"\" # SHA-1", "LeaseRequest instances to batch together. requests = messages.MessageField(LeaseRequest, 1, repeated=True)", "hostname = messages.StringField(5) # Timestamp indicating lease expiration seconds from", "disable=unused-wildcard-import, wildcard-import from protorpc import messages from components.machine_provider.dimensions import *", "LeaseReleaseRequestError indicating an error with the request, or None #", "duration = messages.IntegerField(3) # Cloud Pub/Sub topic name to communicate", "in seconds. duration = messages.IntegerField(3) # Cloud Pub/Sub topic name", "whose instructions to retrieve. hostname = messages.StringField(1, required=True) # Backend", "# Proposed Cloud Pub/Sub project was invalid. INVALID_PROJECT = 7", "fulfilled. FULFILLED = 2 # LeaseRequest has been denied. DENIED", "being instructed. 
client_request_id = messages.StringField(1, required=True) # MachineInstructionError indicating an", "1, repeated=True) class LeaseReleaseRequestError(messages.Enum): \"\"\"Represents an error in a LeaseReleaseRequest.\"\"\"", "= messages.StringField(1, required=True) # Backend the machine belongs to. Generally", "messages.MessageField(LeaseResponse, 1, repeated=True) class LeaseReleaseRequest(messages.Message): \"\"\"Represents a request to voluntarily", "requests. request_id = messages.StringField(1, required=True) # Dimensions instance specifying what", "hostnames must be unique in the catalog. HOSTNAME_REUSE = 1", "error in a LeaseReleaseRequest.\"\"\" # Request ID referred to non-existent", "UNSPECIFIED_TOPIC = 4 # Request couldn't be processed in time.", "required=True) # Backend the machine belongs to. Generally required. backend", "instructions. pubsub_subscription = messages.StringField(4) # Project the Cloud Pub/Sub subscription", "lease_expiration_ts = messages.IntegerField(6) class BatchedLeaseResponse(messages.Message): \"\"\"Represents a response to a", "whose machine was # already reclaimed. ALREADY_RECLAIMED = 3 #", "LeaseRequest for the # machine being instructed. client_request_id = messages.StringField(1,", "6 # Proposed Cloud Pub/Sub project was invalid. INVALID_PROJECT =", "# Dimensions instance specifying what sort of machine to lease.", "of machine this is. dimensions = messages.MessageField(Dimensions, 1, required=True) #", "processed in time. 
DEADLINE_EXCEEDED = 4 # Miscellaneous transient error.", "# pylint: disable=unused-wildcard-import, wildcard-import from protorpc import messages from components.machine_provider.dimensions", "instruction = messages.MessageField(Instruction, 2) class MachineInstructionError(messages.Enum): \"\"\"Represents an error in", "request.\"\"\" responses = messages.MessageField(LeaseResponse, 1, repeated=True) class LeaseReleaseRequest(messages.Message): \"\"\"Represents a", "class AckRequest(messages.Message): \"\"\"Represents a request to ack an instruction received", "# Project the Cloud Pub/Sub subscription exists in. pubsub_subscription_project =", "an error in a LeaseReleaseRequest.\"\"\" # Request ID referred to", "this response refers to. request_hash = messages.StringField(1) # LeaseReleaseRequestError indicating", "catalog. dimensions.backend must be specified. dimensions.hostname must be unique per", "ENTRY_NOT_FOUND = 2 # Didn't specify a backend. UNSPECIFIED_BACKEND =", "# Use of this source code is governed under the", "together. requests = messages.MessageField(LeaseRequest, 1, repeated=True) class LeaseRequestError(messages.Enum): \"\"\"Represents an", "that can be found in the LICENSE file. \"\"\"Messages for", "ack an instruction received by a machine.\"\"\" # Hostname of", "were specified. LEASE_LENGTH_UNSPECIFIED = 10 # Requested lease duration is", "originating the request. MISMATCHED_BACKEND = 4 # Didn't specify a", "UNSPECIFIED_BACKEND = 3 # Specified backend didn't match the backend", "class BatchedLeaseRequest(messages.Message): \"\"\"Represents a batched set of LeaseRequests.\"\"\" # LeaseRequest", "messages.EnumField(Backend, 2) class CatalogMachineRetrievalResponse(messages.Message): \"\"\"Represents a response to a catalog", "Proposed Cloud Pub/Sub project was invalid. INVALID_PROJECT = 7 #", "a request to send an instruction to a leased machine.\"\"\"", "whose machine should be # instructed. 
request_id = messages.StringField(1, required=True)", "for instructions. pubsub_subscription = messages.StringField(4) # Project the Cloud Pub/Sub", "error. error = messages.EnumField(LeaseRequestError, 2) # Request ID used by", "to. pubsub_topic = messages.StringField(6) # Project the Cloud Pub/Sub topic", "to a LeaseReleaseRequest.\"\"\" # SHA-1 identifying the LeaseRequest this response", "been leased. on_lease = messages.MessageField(Instruction, 6) # UTC seconds from", "is no error. error = messages.EnumField(MachineInstructionError, 2) class PollRequest(messages.Message): \"\"\"Represents", "repeated=True) class CatalogMachineDeletionRequest(messages.Message): \"\"\"Represents a request to delete a machine", "Request ID referred to an unfulfilled request. NOT_FULFILLED = 2", "# Didn't specify a Cloud Pub/Sub topic. UNSPECIFIED_TOPIC = 8", "lease expiration seconds from epoch in UTC. lease_expiration_ts = messages.IntegerField(6)", "Generally required. backend = messages.EnumField(Backend, 2) class PollResponse(messages.Message): \"\"\"Represents a", "policies = messages.MessageField(Policies, 2) # State of the CatalogMachineEntry. state", "= messages.StringField(1, required=True) class BatchedLeaseReleaseRequest(messages.Message): \"\"\"Represents a batched set of", "machine was # already reclaimed. ALREADY_RECLAIMED = 2 # Invalid", "sort of machine to lease. dimensions = messages.MessageField(Dimensions, 2, required=True)", "HOSTNAME_REUSE = 1 # Tried to lookup an entry that", "belongs to. Generally required. backend = messages.EnumField(Backend, 2) class PollResponse(messages.Message):", "PollRequest(messages.Message): \"\"\"Represents a request to poll for instructions given to", "# Cloud Pub/Sub topic name to communicate on regarding this", "= messages.MessageField(Dimensions, 1, required=True) class CatalogManipulationRequestError(messages.Enum): \"\"\"Represents an error in", "Pub/Sub topic name to communicate on regarding this request. 
pubsub_topic", "was invalid. INVALID_PROJECT = 3 # Didn't specify a Cloud", "to lookup an entry that didn't exist. ENTRY_NOT_FOUND = 2", "of lease release requests.\"\"\" requests = messages.MessageField(LeaseReleaseRequest, 1, repeated=True) class", "repeated=True) class LeaseRequestError(messages.Enum): \"\"\"Represents an error in a LeaseRequest.\"\"\" #", "configuration. policies = messages.MessageField(Policies, 2, required=True) class CatalogMachineBatchAdditionRequest(messages.Message): \"\"\"Represents a", "Per backend, hostnames must be unique in the catalog. HOSTNAME_REUSE", "LeaseRequest.\"\"\" # Request IDs are intended to be unique. #", "Request ID referred to a fulfilled request whose machine was", "Pub/Sub topic the machine must be subscribed to. pubsub_topic =", "specified. dimensions.hostname must be unique per backend. \"\"\" # Dimensions", "Project the Cloud Pub/Sub subscription exists in. pubsub_subscription_project = messages.StringField(5)", "the LeaseRequest. state = messages.EnumField(LeaseRequestState, 4) # Hostname of the", "the client to generate the LeaseRequest for the # machine", "a LeaseRequest.\"\"\" # Per-user unique ID used to identify the", "= messages.MessageField( CatalogManipulationResponse, 1, repeated=True) class LeaseRequest(messages.Message): \"\"\"Represents a request", "response refers to. request_hash = messages.StringField(1) # LeaseReleaseRequestError indicating an", "request, or None # if there is no error. error", "lease_expiration_ts both specified. MUTUAL_EXCLUSION_ERROR = 7 # Proposed duration was", "LeaseRequest. client_request_id = messages.StringField(3, required=True) # State of the LeaseRequest.", "class CatalogManipulationRequestError(messages.Enum): \"\"\"Represents an error in a catalog manipulation request.\"\"\"", "backend. \"\"\" # CatalogMachineAdditionRequest instances to batch together. 
requests =", "LeaseReleaseRequest.\"\"\" # Request ID referred to non-existent request for this", "= 2 # LeaseRequest has been denied. DENIED = 3", "LeaseRequest has been denied. DENIED = 3 class LeaseResponse(messages.Message): \"\"\"Represents", "request_id = messages.StringField(1, required=True) # Instruction to send the leased", "this source code is governed under the Apache License, Version", "class PollResponse(messages.Message): \"\"\"Represents a response to a request for instructions", "= messages.MessageField( CatalogMachineAdditionRequest, 1, repeated=True) class CatalogMachineDeletionRequest(messages.Message): \"\"\"Represents a request", "# Requested lease duration is too long. LEASE_TOO_LONG = 11", "to send an instruction to a leased machine.\"\"\" # Request", "a MachineInstructionRequest.\"\"\" # Request ID referred to an unfulfilled request.", "required=True) class BatchedLeaseReleaseRequest(messages.Message): \"\"\"Represents a batched set of lease release", "Cloud Pub/Sub topic was invalid. INVALID_TOPIC = 2 # Proposed", "a hostname. UNSPECIFIED_HOSTNAME = 5 # Proposed Cloud Pub/Sub topic", "to batch together. requests = messages.MessageField( CatalogMachineAdditionRequest, 1, repeated=True) class", "was invalid. INVALID_TOPIC = 2 # Proposed Cloud Pub/Sub project", "= 2 # Didn't specify a backend. UNSPECIFIED_BACKEND = 3", "the machine to retrieve. hostname = messages.StringField(1, required=True) # Backend", "that didn't exist. ENTRY_NOT_FOUND = 2 # Didn't specify a", "= 2 # Proposed Cloud Pub/Sub project was invalid. INVALID_PROJECT", "= messages.StringField(3, required=True) # State of the LeaseRequest. state =", "repeated=True) class LeaseReleaseRequest(messages.Message): \"\"\"Represents a request to voluntarily cancel a", "# Instructions to give the machine once it's been leased.", "a catalog machine retrieval request.\"\"\" # Dimensions instance specifying what", "specifying machine-specific configuration. 
policies = messages.MessageField(Policies, 2, required=True) class CatalogMachineBatchAdditionRequest(messages.Message):", "must be unique in the catalog. HOSTNAME_REUSE = 1 #", "DEADLINE_EXCEEDED = 5 # Miscellaneous transient error. TRANSIENT_ERROR = 6", "response is in reference to. machine_deletion_request = messages.MessageField( CatalogMachineDeletionRequest, 3)" ]
[ "print(url.geturl()) print(url.info()) print(url.getcode()) # Analise o html na variável 'page'", "na variável 'page' e armazene-o no formato Beautiful Soup soup", "Soup soup = BeautifulSoup(page, 'html.parser') #print(soup.prettify()) print(soup.title) print(soup.title.string) print(soup.title.name) soup_a", "'page' e armazene-o no formato Beautiful Soup soup = BeautifulSoup(page,", "<reponame>carvalho-fdec/DesafioDSA # webscraping test import urllib.request from bs4 import BeautifulSoup", "test import urllib.request from bs4 import BeautifulSoup with urllib.request.urlopen('http://www.netvasco.com.br') as", "o html na variável 'page' e armazene-o no formato Beautiful", "variável 'page' e armazene-o no formato Beautiful Soup soup =", "as url: page = url.read() #print(page) print(url.geturl()) print(url.info()) print(url.getcode()) #", "urllib.request.urlopen('http://www.netvasco.com.br') as url: page = url.read() #print(page) print(url.geturl()) print(url.info()) print(url.getcode())", "import urllib.request from bs4 import BeautifulSoup with urllib.request.urlopen('http://www.netvasco.com.br') as url:", "= url.read() #print(page) print(url.geturl()) print(url.info()) print(url.getcode()) # Analise o html", "urllib.request from bs4 import BeautifulSoup with urllib.request.urlopen('http://www.netvasco.com.br') as url: page", "print(soup.title) print(soup.title.string) print(soup.title.name) soup_a = soup.find_all('a')[:10] for a in soup_a:", "BeautifulSoup with urllib.request.urlopen('http://www.netvasco.com.br') as url: page = url.read() #print(page) print(url.geturl())", "# webscraping test import urllib.request from bs4 import BeautifulSoup with", "soup = BeautifulSoup(page, 'html.parser') #print(soup.prettify()) print(soup.title) print(soup.title.string) print(soup.title.name) soup_a =", "#print(page) print(url.geturl()) print(url.info()) print(url.getcode()) # Analise o html na variável", "print(url.info()) print(url.getcode()) # Analise o html na variável 
'page' e", "from bs4 import BeautifulSoup with urllib.request.urlopen('http://www.netvasco.com.br') as url: page =", "import BeautifulSoup with urllib.request.urlopen('http://www.netvasco.com.br') as url: page = url.read() #print(page)", "webscraping test import urllib.request from bs4 import BeautifulSoup with urllib.request.urlopen('http://www.netvasco.com.br')", "= BeautifulSoup(page, 'html.parser') #print(soup.prettify()) print(soup.title) print(soup.title.string) print(soup.title.name) soup_a = soup.find_all('a')[:10]", "#print(soup.prettify()) print(soup.title) print(soup.title.string) print(soup.title.name) soup_a = soup.find_all('a')[:10] for a in", "with urllib.request.urlopen('http://www.netvasco.com.br') as url: page = url.read() #print(page) print(url.geturl()) print(url.info())", "url.read() #print(page) print(url.geturl()) print(url.info()) print(url.getcode()) # Analise o html na", "print(url.getcode()) # Analise o html na variável 'page' e armazene-o", "no formato Beautiful Soup soup = BeautifulSoup(page, 'html.parser') #print(soup.prettify()) print(soup.title)", "print(soup.title.string) print(soup.title.name) soup_a = soup.find_all('a')[:10] for a in soup_a: print(a.get('href'))", "url: page = url.read() #print(page) print(url.geturl()) print(url.info()) print(url.getcode()) # Analise", "BeautifulSoup(page, 'html.parser') #print(soup.prettify()) print(soup.title) print(soup.title.string) print(soup.title.name) soup_a = soup.find_all('a')[:10] for", "'html.parser') #print(soup.prettify()) print(soup.title) print(soup.title.string) print(soup.title.name) soup_a = soup.find_all('a')[:10] for a", "formato Beautiful Soup soup = BeautifulSoup(page, 'html.parser') #print(soup.prettify()) print(soup.title) print(soup.title.string)", "bs4 import BeautifulSoup with urllib.request.urlopen('http://www.netvasco.com.br') as url: page = url.read()", "armazene-o no formato Beautiful Soup soup = BeautifulSoup(page, 'html.parser') #print(soup.prettify())", 
"print(soup.title.name) soup_a = soup.find_all('a')[:10] for a in soup_a: print(a.get('href')) print(a.get_text())", "e armazene-o no formato Beautiful Soup soup = BeautifulSoup(page, 'html.parser')", "Analise o html na variável 'page' e armazene-o no formato", "html na variável 'page' e armazene-o no formato Beautiful Soup", "# Analise o html na variável 'page' e armazene-o no", "Beautiful Soup soup = BeautifulSoup(page, 'html.parser') #print(soup.prettify()) print(soup.title) print(soup.title.string) print(soup.title.name)", "page = url.read() #print(page) print(url.geturl()) print(url.info()) print(url.getcode()) # Analise o" ]
[ "them back out), so we provide a fake multiplexer instead.", "self.assertEqual(sample.max_length, 6 + 2) self.assertEqual(sample.description, \"bottom-left\") self.assertEqual(sample.display_name, \"\") with self.subTest(\"filters", "2.0 (the \"License\"); # you may not use this file", ") scalar_summary.scalar(\"cube\", i ** 3, step=3 * i) logdir =", "the interface. multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir", "We can't control the timestamps of events written to disk", "xrange(10): scalar_summary.scalar( \"square\", i ** 2, step=2 * i, description=\"boxen\"", "* i, description=\"boxen\" ) scalar_summary.scalar(\"cube\", i ** 3, step=3 *", ") def create_multiplexer(self): multiplexer = event_multiplexer.EventMultiplexer() multiplexer.AddRunsFromDirectory(self.logdir) multiplexer.Reload() return multiplexer", "self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual( result[\"mondrian\"].keys(), [\"red\", \"blue\", \"yellow\"] ) sample =", "= tf.tile(image_1x1, [k, i, i, 1]) image_summary.image( name, image, step=i,", "tensor_util.make_ndarray(event.tensor_proto).item(), ) def test_read_scalars_downsamples(self): # TODO(@wchargin): Verify that this always", "control the timestamps of events written to disk (without #", "six.assertRaisesRegex( self, ValueError, \"can only convert an array of size", "context.RequestContext() logdir = os.path.join(self.logdir, \"polynomials\") with tf.summary.create_file_writer(logdir).as_default(): for i in", "test_read_blob_sequences_and_read_blob(self): provider = self.create_provider() with self.subTest(\"reads all time series for", "self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME,", "multiplexer.Tensors(run, tag) self.assertLen(result[run][tag], 
len(tensor_events)) for (datum, event) in zip(result[run][tag], tensor_events):", "result = provider.data_location(self.ctx, experiment_id=\"unused\") self.assertEqual(result, self.logdir) def test_list_plugins_with_no_graph(self): provider =", "create_provider(self): multiplexer = self.create_multiplexer() return data_provider.MultiplexerDataProvider(multiplexer, self.logdir) def test_data_location(self): provider", "disk (without # manually reading the tfrecords, modifying the data,", "multiplexer, self.logdir ) result = provider.read_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, downsample=3,", "the scalars plugin. metadata = summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name = \"marigraphs\" metadata.data_class", "plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter(runs=[\"waves\", \"hugs\"]), ) self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"sine\", \"square\"]) result", "scalars and tensors use the same underlying # filtering implementation.", "import print_function import os import six from six.moves import xrange", ") result = provider.list_runs(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result, [ base_provider.Run( run_id=run,", "self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"]) for run in result:", "experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( [\"lebesgue\"], [\"uniform\"] ), ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(),", "tensorboard.plugins.image import metadata as image_metadata from tensorboard.plugins.image import summary_v2 as", "provider.read_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, downsample=3, ) 
self.assertLen(result[\"lebesgue\"][\"uniform\"], 3) def test_list_blob_sequences(self):", "a `k`-sample image summary of `i`-by-`i` images image = tf.tile(image_1x1,", "# Summary with rank-0 data but not owned by the", "- i) # 1, .., 6, .., 2 # a", "multiplexer, \"fake_logdir\" ) result = provider.list_runs(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result, [", "self.assertEqual(actual, expected) def test_underlong_ok(self): xs = list(\"abcdefg\") actual = data_provider._downsample(xs,", "License for the specific language governing permissions and # limitations", "self.create_provider() result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=None, ) self.assertItemsEqual(result.keys(),", "\"\"\"Unit tests for `tensorboard.backend.event_processing.data_provider`.\"\"\" from __future__ import absolute_import from __future__", "Reserved. # # Licensed under the Apache License, Version 2.0", "6 - abs(6 - i) # 1, .., 6, ..,", "multiplexer = self.create_multiplexer() return data_provider.MultiplexerDataProvider(multiplexer, self.logdir) def test_data_location(self): provider =", "provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"waves\"], [\"square\"]), ) self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(),", "metadata as graph_metadata from tensorboard.plugins.histogram import metadata as histogram_metadata from", "result is None: raise ValueError(\"No event timestep could be found\")", "self.assertEqual(actual, sorted(actual)) def test_zero(self): xs = \"abcdefg\" actual = data_provider._downsample(xs,", "base_provider.BlobSequenceDatum, ) class DownsampleTest(tf.test.TestCase): \"\"\"Tests for the `_downsample` private helper", "= result[\"mondrian\"][\"blue\"] self.assertIsInstance(sample, base_provider.BlobSequenceTimeSeries) 
self.assertEqual(sample.max_step, 10) # nothing to test", "import summary_v2 as scalar_summary from tensorboard.plugins.image import metadata as image_metadata", "by V2 summary ops self.assertEqual(sample.description, \"boxen\") def test_list_scalars_filters(self): provider =", "provider.list_runs(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result, [ base_provider.Run( run_id=run, run_name=run, start_time=start_time )", "can't be mocked out self.assertEqual(sample.plugin_content, b\"\") self.assertEqual(sample.max_length, 6 + 2)", "self.logdir ) result = provider.read_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, downsample=3, )", "\"greetings\", \"marigraphs\", histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ], ) def test_list_plugins_with_graph(self): with", "experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\", \"green't\"] ), downsample=1, )", "timestamps of events written to disk (without # manually reading", "the `_downsample` private helper function.\"\"\" def test_deterministic(self): xs = \"abcdefg\"", "smooth\", (0.0, 0.25, 0.5, 0.75, 1.0), \"uniform\"), (\"very smoothn't\", (0.0,", "base_provider from tensorboard.plugins.graph import metadata as graph_metadata from tensorboard.plugins.histogram import", "1.0), \"bimodal\"), ] for (description, distribution, name) in data: tensor", "in six.iteritems(start_times) ], ) def test_list_scalars_all(self): provider = self.create_provider() result", "tensor=[i, i], step=i, metadata=metadata ) logdir = os.path.join(self.logdir, \"lebesgue\") with", "explicit checks yet. 
with six.assertRaisesRegex( self, ValueError, \"can only convert", "timestep could be found\") else: return result multiplexer = FakeMultiplexer()", "0.99, 1.0), \"bimodal\"), ] for (description, distribution, name) in data:", "as histogram_metadata from tensorboard.plugins.histogram import summary_v2 as histogram_summary from tensorboard.plugins.scalar", "2019 The TensorFlow Authors. All Rights Reserved. # # Licensed", "self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"]) sample = result[\"polynomials\"][\"square\"] self.assertIsInstance(sample,", "os.path.join(self.logdir, \"waves\") with tf.summary.create_file_writer(logdir).as_default(): for i in xrange(10): scalar_summary.scalar(\"sine\", tf.sin(float(i)),", "1]) image_summary.image( name, image, step=i, description=description, max_outputs=99, ) def create_multiplexer(self):", "from tensorboard.compat.proto import summary_pb2 from tensorboard.data import provider as base_provider", "result = provider.list_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=None, ) self.assertItemsEqual(result.keys(), [\"lebesgue\"])", "OF ANY KIND, either express or implied. # See the", "See the License for the specific language governing permissions and", "start_times) result = start_times[run] if result is None: raise ValueError(\"No", "self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"], base_provider.BlobSequenceTimeSeries, ) def test_read_blob_sequences_and_read_blob(self): provider =", "to in writing, software # distributed under the License is", "scalar data class (bad!). 
metadata = summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name = \"greetings\"", "self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( tags=[\"square\",", "import summary_pb2 from tensorboard.data import provider as base_provider from tensorboard.plugins.graph", "MultiplexerDataProviderTest(tf.test.TestCase): def setUp(self): super(MultiplexerDataProviderTest, self).setUp() self.logdir = self.get_temp_dir() self.ctx =", "multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) run_tag_filter", "or agreed to in writing, software # distributed under the", "(\"very smooth\", (0.0, 0.25, 0.5, 0.75, 1.0), \"uniform\"), (\"very smoothn't\",", "test_read_scalars_downsamples(self): # TODO(@wchargin): Verify that this always includes the most", "10) self.assertLen(last.values, 2 + 2) blobs = [ provider.read_blob(self.ctx, blob_key=v.blob_key)", "\"marigraphs\" metadata.data_class = summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"high_tide\", tensor=i, step=i, metadata=metadata )", "_ in range(100): actual = data_provider._downsample(xs, k=4) self.assertEqual(actual, expected) def", "from __future__ import print_function import os import six from six.moves", "test_read_tensors_downsamples(self): multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir )", "compliance with the License. # You may obtain a copy", "All Rights Reserved. 
# # Licensed under the Apache License,", "1.0), \"uniform\"), (\"very smoothn't\", (0.0, 0.01, 0.99, 1.0), \"bimodal\"), ]", "plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=None, ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(),", "\"bimodal\"]) sample = result[\"lebesgue\"][\"uniform\"] self.assertIsInstance(sample, base_provider.TensorTimeSeries) self.assertEqual(sample.max_step, 10) # nothing", "with tf.summary.create_file_writer(logdir).as_default(): data = [ (\"very smooth\", (0.0, 0.25, 0.5,", "[ provider.read_blob(self.ctx, blob_key=v.blob_key) for v in last.values ] self.assertEqual(blobs[0], b\"10\")", "= [ (\"red\", (221, 28, 38), \"top-right\"), (\"blue\", (1, 91,", "found\") else: return result multiplexer = FakeMultiplexer() provider = data_provider.MultiplexerDataProvider(", "test_list_tensors_filters(self): provider = self.create_provider() # Quick check only, as scalars", "data_provider.MultiplexerDataProvider( multiplexer, \"fake_logdir\" ) result = provider.list_runs(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result,", "self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\"]) def test_read_tensors(self): multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider(", ") sample = result[\"mondrian\"][\"blue\"] self.assertLen(sample, 4) # downsampled from 10", "tf.compat.v1.summary.FileWriter(self.logdir) writer.add_graph(graph) writer.flush() provider = self.create_provider() result = provider.list_plugins(self.ctx, experiment_id=\"unused\")", "not use this file except in compliance with the License.", "experiment_id=\"unused\") self.assertEqual(result, self.logdir) def test_list_plugins_with_no_graph(self): provider = self.create_provider() result =", "you may not use this file except in compliance with", 
"multiplexer.AddRunsFromDirectory(self.logdir) multiplexer.Reload() return multiplexer def create_provider(self): multiplexer = self.create_multiplexer() return", "description) in data: image_1x1 = tf.constant([[[color]]], dtype=tf.uint8) for i in", "downsampled from 10 last = sample[-1] self.assertIsInstance(last, base_provider.BlobSequenceDatum) self.assertEqual(last.step, 10)", "[\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"]) sample = result[\"lebesgue\"][\"uniform\"] self.assertIsInstance(sample, base_provider.TensorTimeSeries) self.assertEqual(sample.max_step,", "tags=[\"square\", \"quartic\"] ), ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\"]) self.assertItemsEqual(result[\"waves\"].keys(),", "in xrange(10): scalar_summary.scalar( \"square\", i ** 2, step=2 * i,", "as specified by the interface. multiplexer = self.create_multiplexer() provider =", "provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, downsample=3, ) self.assertLen(result[\"waves\"][\"sine\"], 3) def test_read_scalars_but_not_rank_0(self):", "np.testing.assert_equal( datum.numpy, tensor_util.make_ndarray(event.tensor_proto), ) def test_read_tensors_downsamples(self): multiplexer = self.create_multiplexer() provider", "tags=[\"uniform\", \"bimodal\"], ) result = provider.read_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter,", "[\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"]) for run in result: for", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", ") def test_list_plugins_with_graph(self): with tf.compat.v1.Graph().as_default() as graph: writer = tf.compat.v1.summary.FileWriter(self.logdir)", "provider.list_blob_sequences( self.ctx, experiment_id=\"unused\", 
plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\", \"green't\"] ),", "blue2 = blobs[3] red1 = provider.read_blob( self.ctx, blob_key=result[\"mondrian\"][\"red\"][-1].values[2].blob_key, ) self.assertEqual(blue1,", "test_list_scalars_all(self): provider = self.create_provider() result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME,", "provider = self.create_provider() with self.subTest(\"finds all time series for a", "self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"]) sample = result[\"lebesgue\"][\"uniform\"] self.assertIsInstance(sample, base_provider.TensorTimeSeries) self.assertEqual(sample.max_step, 10)", "= self.create_provider() # Quick check only, as scalars and tensors", "series for a plugin\"): result = provider.list_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME,", ") def test_list_runs(self): # We can't control the timestamps of", "FakeMultiplexer(object): def Runs(multiplexer): result = [\"second_2\", \"first\", \"no_time\", \"second_1\"] self.assertItemsEqual(result,", "18) # nothing to test for wall time, as it", "(description, distribution, name) in data: tensor = tf.constant([distribution], dtype=tf.float64) for", "if result is None: raise ValueError(\"No event timestep could be", "tf.sign(tf.sin(float(i))), step=i ) # Summary with rank-0 data but not", "self.assertEqual( sample.display_name, \"\" ) # not written by V2 summary", "six.moves import xrange # pylint: disable=redefined-builtin import numpy as np", "class DownsampleTest(tf.test.TestCase): \"\"\"Tests for the `_downsample` private helper function.\"\"\" def", "scalar_summary.scalar(\"cube\", i ** 3, step=3 * i) logdir = os.path.join(self.logdir,", "scalar_metadata.PLUGIN_NAME, ], ) def test_list_plugins_with_graph(self): with tf.compat.v1.Graph().as_default() as graph: writer", "i, 
description=\"boxen\" ) scalar_summary.scalar(\"cube\", i ** 3, step=3 * i)", "most # recent datum, as specified by the interface. multiplexer", "data = [ (\"red\", (221, 28, 38), \"top-right\"), (\"blue\", (1,", "\"square\", tf.sign(tf.sin(float(i))), step=i ) # Summary with rank-0 data but", "experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( tags=[\"square\", \"quartic\"] ), ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"])", "zip(result[run][tag], tensor_events): self.assertEqual(datum.step, event.step) self.assertEqual(datum.wall_time, event.wall_time) np.testing.assert_equal( datum.numpy, tensor_util.make_ndarray(event.tensor_proto), )", "data, and writing # them back out), so we provide", ") self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"]) for run in result:", "k=10) expected = list(\"abcdefg\") self.assertIsNot(actual, xs) self.assertEqual(actual, expected) def test_inorder(self):", "for (datum, event) in zip(result[run][tag], tensor_events): self.assertEqual(datum.step, event.step) self.assertEqual(datum.wall_time, event.wall_time)", "[ (\"red\", (221, 28, 38), \"top-right\"), (\"blue\", (1, 91, 158),", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "step=i, description=description ) logdir = os.path.join(self.logdir, \"mondrian\") with tf.summary.create_file_writer(logdir).as_default(): data", "self).setUp() self.logdir = self.get_temp_dir() self.ctx = context.RequestContext() logdir = os.path.join(self.logdir,", "summary_v2 as image_summary from tensorboard.util import tensor_util import tensorflow.compat.v1 as", "blue1 = blobs[2] blue2 = blobs[3] red1 = provider.read_blob( self.ctx,", "import absolute_import from __future__ import division from __future__ import print_function", "xrange(10): scalar_summary.scalar(\"sine\", tf.sin(float(i)), step=i) scalar_summary.scalar( 
\"square\", tf.sign(tf.sin(float(i))), step=i ) #", "class MultiplexerDataProviderTest(tf.test.TestCase): def setUp(self): super(MultiplexerDataProviderTest, self).setUp() self.logdir = self.get_temp_dir() self.ctx", "], ) def test_list_plugins_with_graph(self): with tf.compat.v1.Graph().as_default() as graph: writer =", "image = tf.tile(image_1x1, [k, i, i, 1]) image_summary.image( name, image,", "base_provider.Run( run_id=run, run_name=run, start_time=start_time ) for (run, start_time) in six.iteritems(start_times)", "file except in compliance with the License. # You may", "a non-monotonic sequence of sample sizes to # test `max_length`", "), downsample=1, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"][0], base_provider.BlobSequenceDatum,", "self.assertEqual(blobs[1], b\"10\") self.assertStartsWith(blobs[2], b\"\\x89PNG\") self.assertStartsWith(blobs[3], b\"\\x89PNG\") blue1 = blobs[2] blue2", "\"second_1\"] self.assertItemsEqual(result, start_times) return result def FirstEventTimestamp(multiplexer, run): self.assertIn(run, start_times)", "= tf.constant([distribution], dtype=tf.float64) for i in xrange(1, 11): histogram_summary.histogram( name,", "under the License. # ============================================================================== \"\"\"Unit tests for `tensorboard.backend.event_processing.data_provider`.\"\"\" from", "self.assertLen(result[\"lebesgue\"][\"uniform\"], 3) def test_list_blob_sequences(self): provider = self.create_provider() with self.subTest(\"finds all", "[\"second_2\", \"first\", \"no_time\", \"second_1\"] self.assertItemsEqual(result, start_times) return result def FirstEventTimestamp(multiplexer,", "by the interface. 
multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer,", "actual = data_provider._downsample(xs, k=0) self.assertEqual(actual, []) if __name__ == \"__main__\":", "3) def test_list_blob_sequences(self): provider = self.create_provider() with self.subTest(\"finds all time", "\"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result = provider.list_scalars( self.ctx, experiment_id=\"unused\",", "name) in data: tensor = tf.constant([distribution], dtype=tf.float64) for i in", "by run/tag\"): result = provider.read_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\",", "not written by V2 summary ops self.assertEqual(sample.description, \"very smooth\") def", "self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"]) sample", "name, tensor * i, step=i, description=description ) logdir = os.path.join(self.logdir,", "\"bottom-left\") self.assertEqual(sample.display_name, \"\") with self.subTest(\"filters by run/tag\"): result = provider.list_blob_sequences(", "# pylint: disable=redefined-builtin import numpy as np from tensorboard import", "data class (bad!). metadata = summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name = \"greetings\" metadata.data_class", "with rank-0 data but not owned by the scalars plugin.", "import metadata as histogram_metadata from tensorboard.plugins.histogram import summary_v2 as histogram_summary", "[\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"]) sample = result[\"polynomials\"][\"square\"] self.assertIsInstance(sample, base_provider.ScalarTimeSeries)", "KIND, either express or implied. 
# See the License for", "for the `_downsample` private helper function.\"\"\" def test_deterministic(self): xs =", "def test_list_tensors_all(self): provider = self.create_provider() result = provider.list_tensors( self.ctx, experiment_id=\"unused\",", "[ (\"very smooth\", (0.0, 0.25, 0.5, 0.75, 1.0), \"uniform\"), (\"very", "11): # Use a non-monotonic sequence of sample sizes to", "event.step) self.assertEqual(datum.wall_time, event.wall_time) self.assertEqual( datum.value, tensor_util.make_ndarray(event.tensor_proto).item(), ) def test_read_scalars_downsamples(self): #", "plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\", \"green't\"] ), ) self.assertItemsEqual(result.keys(), [\"mondrian\"])", "name, image, step=i, description=description, max_outputs=99, ) def create_multiplexer(self): multiplexer =", "result multiplexer = FakeMultiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, \"fake_logdir\" )", "provider = self.create_provider() with self.subTest(\"reads all time series for a", "create_multiplexer(self): multiplexer = event_multiplexer.EventMultiplexer() multiplexer.AddRunsFromDirectory(self.logdir) multiplexer.Reload() return multiplexer def create_provider(self):", "1, .., 6, .., 2 # a `k`-sample image summary", "b\"10\") self.assertEqual(blobs[1], b\"10\") self.assertStartsWith(blobs[2], b\"\\x89PNG\") self.assertStartsWith(blobs[3], b\"\\x89PNG\") blue1 = blobs[2]", "i in xrange(10): scalar_summary.scalar(\"sine\", tf.sin(float(i)), step=i) scalar_summary.scalar( \"square\", tf.sign(tf.sin(float(i))), step=i", "= os.path.join(self.logdir, \"mondrian\") with tf.summary.create_file_writer(logdir).as_default(): data = [ (\"red\", (221,", "(the \"License\"); # you may not use this file except", "i ** 3, step=3 * i) logdir = os.path.join(self.logdir, \"waves\")", "self.logdir ) result = provider.read_scalars( self.ctx, experiment_id=\"unused\", 
plugin_name=scalar_metadata.PLUGIN_NAME, downsample=3, )", "tensorboard.plugins.histogram import metadata as histogram_metadata from tensorboard.plugins.histogram import summary_v2 as", "1 to a Python scalar\", ): provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=\"greetings\",", "by V2 summary ops self.assertEqual(sample.description, \"very smooth\") def test_list_tensors_filters(self): provider", "self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, downsample=4, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual( result[\"mondrian\"].keys(), [\"red\",", "result[\"mondrian\"][\"blue\"] self.assertIsInstance(sample, base_provider.BlobSequenceTimeSeries) self.assertEqual(sample.max_step, 10) # nothing to test for", "# # Unless required by applicable law or agreed to", "is None: raise ValueError(\"No event timestep could be found\") else:", "to # test `max_length` calculation. k = 6 - abs(6", "to disk (without # manually reading the tfrecords, modifying the", "underlying # filtering implementation. result = provider.list_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME,", "self.assertItemsEqual( result, [ base_provider.Run( run_id=run, run_name=run, start_time=start_time ) for (run,", "with tf.summary.create_file_writer(logdir).as_default(): data = [ (\"red\", (221, 28, 38), \"top-right\"),", "image summary of `i`-by-`i` images image = tf.tile(image_1x1, [k, i,", "# Use a non-monotonic sequence of sample sizes to #", "self.assertLen(sample, 4) # downsampled from 10 last = sample[-1] self.assertIsInstance(last,", "\"bottom-left\"), (\"yellow\", (239, 220, 111), \"bottom-right\"), ] for (name, color,", "implied. 
# See the License for the specific language governing", "\"fake_logdir\" ) result = provider.list_runs(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result, [ base_provider.Run(", "os.path.join(self.logdir, \"lebesgue\") with tf.summary.create_file_writer(logdir).as_default(): data = [ (\"very smooth\", (0.0,", "else: return result multiplexer = FakeMultiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer,", "last.values ] self.assertEqual(blobs[0], b\"10\") self.assertEqual(blobs[1], b\"10\") self.assertStartsWith(blobs[2], b\"\\x89PNG\") self.assertStartsWith(blobs[3], b\"\\x89PNG\")", "self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"]) for", "return multiplexer def create_provider(self): multiplexer = self.create_multiplexer() return data_provider.MultiplexerDataProvider(multiplexer, self.logdir)", "= start_times[run] if result is None: raise ValueError(\"No event timestep", "\"square\", \"cube\", \"iridescence\"], ) result = provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME,", "result, [ base_provider.Run( run_id=run, run_name=run, start_time=start_time ) for (run, start_time)", "for v in last.values ] self.assertEqual(blobs[0], b\"10\") self.assertEqual(blobs[1], b\"10\") self.assertStartsWith(blobs[2],", "\"second_1\": 2.0, } class FakeMultiplexer(object): def Runs(multiplexer): result = [\"second_2\",", "experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, downsample=3, ) self.assertLen(result[\"waves\"][\"sine\"], 3) def test_read_scalars_but_not_rank_0(self): provider =", "a Python scalar\", ): provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=\"greetings\", run_tag_filter=run_tag_filter, downsample=100,", "Unless required by applicable law or agreed to in writing,", "not owned 
by the scalars plugin. metadata = summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name", "i], step=i, metadata=metadata ) logdir = os.path.join(self.logdir, \"lebesgue\") with tf.summary.create_file_writer(logdir).as_default():", "with self.subTest(\"finds all time series for a plugin\"): result =", "tensor * i, step=i, description=description ) logdir = os.path.join(self.logdir, \"mondrian\")", "the specific language governing permissions and # limitations under the", "\"abcdefg\" actual = data_provider._downsample(xs, k=0) self.assertEqual(actual, []) if __name__ ==", "run_tag_filter = base_provider.RunTagFilter( runs=[\"waves\", \"polynomials\", \"unicorns\"], tags=[\"sine\", \"square\", \"cube\", \"iridescence\"],", "= 6 - abs(6 - i) # 1, .., 6,", "= self.create_provider() run_tag_filter = base_provider.RunTagFilter([\"waves\"], [\"bad\"]) # No explicit checks", "\"bimodal\"], ) result = provider.read_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100,", "self.assertItemsEqual( result[\"mondrian\"].keys(), [\"red\", \"blue\", \"yellow\"] ) sample = result[\"mondrian\"][\"blue\"] self.assertLen(sample,", "= os.path.join(self.logdir, \"lebesgue\") with tf.summary.create_file_writer(logdir).as_default(): data = [ (\"very smooth\",", "self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) run_tag_filter = base_provider.RunTagFilter(", "[\"square\"]), ) self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result = provider.list_scalars( self.ctx,", "self.create_provider() result = provider.data_location(self.ctx, experiment_id=\"unused\") self.assertEqual(result, self.logdir) def test_list_plugins_with_no_graph(self): provider", "\"second_2\": 2.0, \"first\": 1.5, \"no_time\": None, \"second_1\": 2.0, } class", "def FirstEventTimestamp(multiplexer, run): 
self.assertIn(run, start_times) result = start_times[run] if result", "but not owned by the scalars plugin. metadata = summary_pb2.SummaryMetadata()", "logdir = os.path.join(self.logdir, \"mondrian\") with tf.summary.create_file_writer(logdir).as_default(): data = [ (\"red\",", "], ) def test_list_scalars_all(self): provider = self.create_provider() result = provider.list_scalars(", "return data_provider.MultiplexerDataProvider(multiplexer, self.logdir) def test_data_location(self): provider = self.create_provider() result =", "self.assertStartsWith(blobs[3], b\"\\x89PNG\") blue1 = blobs[2] blue2 = blobs[3] red1 =", "expected) def test_inorder(self): xs = list(range(10000)) actual = data_provider._downsample(xs, k=100)", "tf1 import tensorflow.compat.v2 as tf tf1.enable_eager_execution() class MultiplexerDataProviderTest(tf.test.TestCase): def setUp(self):", "xs = list(\"abcdefg\") actual = data_provider._downsample(xs, k=10) expected = list(\"abcdefg\")", "self.assertEqual(sample.plugin_content, b\"\") self.assertEqual(sample.max_length, 6 + 2) self.assertEqual(sample.description, \"bottom-left\") self.assertEqual(sample.display_name, \"\")", "tensorboard.compat.proto import summary_pb2 from tensorboard.data import provider as base_provider from", "i ** 2, step=2 * i, description=\"boxen\" ) scalar_summary.scalar(\"cube\", i", "tensor_util.make_ndarray(event.tensor_proto), ) def test_read_tensors_downsamples(self): multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider(", "result = provider.list_plugins(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result, [ \"greetings\", \"marigraphs\", graph_metadata.PLUGIN_NAME,", "as scalar_metadata from tensorboard.plugins.scalar import summary_v2 as scalar_summary from tensorboard.plugins.image", "\"abcdefg\" expected = data_provider._downsample(xs, k=4) for _ in range(100): actual", "data_provider._downsample(xs, k=100) self.assertEqual(actual, sorted(actual)) def 
test_zero(self): xs = \"abcdefg\" actual", "from tensorboard.backend.event_processing import data_provider from tensorboard.backend.event_processing import ( plugin_event_multiplexer as", "3, step=3 * i) logdir = os.path.join(self.logdir, \"waves\") with tf.summary.create_file_writer(logdir).as_default():", "result = provider.list_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\",", "provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) result = provider.read_tensors( self.ctx,", "Verify that this always includes the most # recent datum,", "= provider.read_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, downsample=4, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(", "(run, start_time) in six.iteritems(start_times) ], ) def test_list_scalars_all(self): provider =", "tf.tile(image_1x1, [k, i, i, 1]) image_summary.image( name, image, step=i, description=description,", "self.assertIsInstance( result[\"mondrian\"][\"yellow\"], base_provider.BlobSequenceTimeSeries, ) def test_read_blob_sequences_and_read_blob(self): provider = self.create_provider() with", "tensor = tf.constant([distribution], dtype=tf.float64) for i in xrange(1, 11): histogram_summary.histogram(", "absolute_import from __future__ import division from __future__ import print_function import", "runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\", \"green't\"] ), downsample=1, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(),", "provider = self.create_provider() result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"waves\"],", "a plugin\"): result = provider.list_blob_sequences( self.ctx, experiment_id=\"unused\", 
plugin_name=image_metadata.PLUGIN_NAME, ) self.assertItemsEqual(result.keys(),", "event_multiplexer.EventMultiplexer() multiplexer.AddRunsFromDirectory(self.logdir) multiplexer.Reload() return multiplexer def create_provider(self): multiplexer = self.create_multiplexer()", "for (run, start_time) in six.iteritems(start_times) ], ) def test_list_scalars_all(self): provider", "from tensorboard.util import tensor_util import tensorflow.compat.v1 as tf1 import tensorflow.compat.v2", "manually reading the tfrecords, modifying the data, and writing #", "tensorflow.compat.v2 as tf tf1.enable_eager_execution() class MultiplexerDataProviderTest(tf.test.TestCase): def setUp(self): super(MultiplexerDataProviderTest, self).setUp()", "base_provider.BlobSequenceTimeSeries) self.assertEqual(sample.max_step, 10) # nothing to test for wall time,", "\"green't\"] ), downsample=1, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"][0],", "[\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"sine\", \"square\"]) result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME,", "test_underlong_ok(self): xs = list(\"abcdefg\") actual = data_provider._downsample(xs, k=10) expected =", "for `tensorboard.backend.event_processing.data_provider`.\"\"\" from __future__ import absolute_import from __future__ import division", "test_list_plugins_with_graph(self): with tf.compat.v1.Graph().as_default() as graph: writer = tf.compat.v1.summary.FileWriter(self.logdir) writer.add_graph(graph) writer.flush()", "tensorboard.plugins.graph import metadata as graph_metadata from tensorboard.plugins.histogram import metadata as", "You may obtain a copy of the License at #", "start_times) return result def FirstEventTimestamp(multiplexer, run): self.assertIn(run, start_times) result =", "result = provider.list_blob_sequences( 
self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(", "[k, i, i, 1]) image_summary.image( name, image, step=i, description=description, max_outputs=99,", "0.25, 0.5, 0.75, 1.0), \"uniform\"), (\"very smoothn't\", (0.0, 0.01, 0.99,", "plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\", \"green't\"] ), downsample=1, ) self.assertItemsEqual(result.keys(),", "not written by V2 summary ops self.assertEqual(sample.description, \"boxen\") def test_list_scalars_filters(self):", "multiplexer, self.logdir ) run_tag_filter = base_provider.RunTagFilter( runs=[\"lebesgue\"], tags=[\"uniform\", \"bimodal\"], )", "step=i, description=description, max_outputs=99, ) def create_multiplexer(self): multiplexer = event_multiplexer.EventMultiplexer() multiplexer.AddRunsFromDirectory(self.logdir)", ") self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"sine\", \"square\"]) result = provider.list_scalars( self.ctx,", "summary of `i`-by-`i` images image = tf.tile(image_1x1, [k, i, i,", "import six from six.moves import xrange # pylint: disable=redefined-builtin import", "2.0, \"first\": 1.5, \"no_time\": None, \"second_1\": 2.0, } class FakeMultiplexer(object):", "event) in zip(result[run][tag], tensor_events): self.assertEqual(datum.step, event.step) self.assertEqual(datum.wall_time, event.wall_time) np.testing.assert_equal( datum.numpy,", "setUp(self): super(MultiplexerDataProviderTest, self).setUp() self.logdir = self.get_temp_dir() self.ctx = context.RequestContext() logdir", "(0.0, 0.01, 0.99, 1.0), \"bimodal\"), ] for (description, distribution, name)", ") sample = result[\"mondrian\"][\"blue\"] self.assertIsInstance(sample, base_provider.BlobSequenceTimeSeries) self.assertEqual(sample.max_step, 10) # nothing", "self.assertIsInstance(sample, 
base_provider.TensorTimeSeries) self.assertEqual(sample.max_step, 10) # nothing to test for wall", "\"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"]) sample = result[\"polynomials\"][\"square\"] self.assertIsInstance(sample, base_provider.ScalarTimeSeries) self.assertEqual(sample.max_step,", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "\"sine\"]) sample = result[\"polynomials\"][\"square\"] self.assertIsInstance(sample, base_provider.ScalarTimeSeries) self.assertEqual(sample.max_step, 18) # nothing", "and tensors use the same underlying # filtering implementation. result", "it can't be mocked out self.assertEqual(sample.plugin_content, b\"\") self.assertEqual(sample.max_length, 6 +", "** 2, step=2 * i, description=\"boxen\" ) scalar_summary.scalar(\"cube\", i **", "= os.path.join(self.logdir, \"polynomials\") with tf.summary.create_file_writer(logdir).as_default(): for i in xrange(10): scalar_summary.scalar(", "i in xrange(10): scalar_summary.scalar( \"square\", i ** 2, step=2 *", "run_tag_filter=base_provider.RunTagFilter( [\"lebesgue\"], [\"uniform\"] ), ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\"]) def", "as graph: writer = tf.compat.v1.summary.FileWriter(self.logdir) writer.add_graph(graph) writer.flush() provider = self.create_provider()", "\"hugs\"]), ) self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"sine\", \"square\"]) result = provider.list_scalars(", "permissions and # limitations under the License. # ============================================================================== \"\"\"Unit", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "run_id=run, run_name=run, start_time=start_time ) for (run, start_time) in six.iteritems(start_times) ],", "License. 
# You may obtain a copy of the License", ") def test_read_scalars_downsamples(self): # TODO(@wchargin): Verify that this always includes", "start_times[run] if result is None: raise ValueError(\"No event timestep could", "test_read_tensors(self): multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir )", "expected = data_provider._downsample(xs, k=4) for _ in range(100): actual =", "220, 111), \"bottom-right\"), ] for (name, color, description) in data:", "time, as it can't be mocked out self.assertEqual(sample.plugin_content, b\"\") self.assertEqual(", "max_outputs=99, ) def create_multiplexer(self): multiplexer = event_multiplexer.EventMultiplexer() multiplexer.AddRunsFromDirectory(self.logdir) multiplexer.Reload() return", "self.assertEqual(datum.wall_time, event.wall_time) self.assertEqual( datum.value, tensor_util.make_ndarray(event.tensor_proto).item(), ) def test_read_scalars_downsamples(self): # TODO(@wchargin):", "written to disk (without # manually reading the tfrecords, modifying", "from tensorboard.data import provider as base_provider from tensorboard.plugins.graph import metadata", "= provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter(runs=[\"waves\", \"hugs\"]), ) self.assertItemsEqual(result.keys(), [\"waves\"])", "os.path.join(self.logdir, \"polynomials\") with tf.summary.create_file_writer(logdir).as_default(): for i in xrange(10): scalar_summary.scalar( \"square\",", "def test_zero(self): xs = \"abcdefg\" actual = data_provider._downsample(xs, k=0) self.assertEqual(actual,", "as scalar_summary from tensorboard.plugins.image import metadata as image_metadata from tensorboard.plugins.image", "governing permissions and # limitations under the License. 
# ==============================================================================", "[\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"][0], base_provider.BlobSequenceDatum, ) class DownsampleTest(tf.test.TestCase): \"\"\"Tests", "TensorFlow Authors. All Rights Reserved. # # Licensed under the", "as tf tf1.enable_eager_execution() class MultiplexerDataProviderTest(tf.test.TestCase): def setUp(self): super(MultiplexerDataProviderTest, self).setUp() self.logdir", "provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( tags=[\"square\", \"quartic\"] ), ) self.assertItemsEqual(result.keys(),", "for (description, distribution, name) in data: tensor = tf.constant([distribution], dtype=tf.float64)", "self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter(runs=[\"waves\", \"hugs\"]),", "# not written by V2 summary ops self.assertEqual(sample.description, \"boxen\") def", "def test_list_scalars_all(self): provider = self.create_provider() result = provider.list_scalars( self.ctx, experiment_id=\"unused\",", "= list(\"abcdefg\") self.assertIsNot(actual, xs) self.assertEqual(actual, expected) def test_inorder(self): xs =", "def test_read_tensors_downsamples(self): multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir", "self.assertEqual(sample.plugin_content, b\"\") self.assertEqual( sample.display_name, \"\" ) # not written by", "self.assertEqual(result, {}) def test_read_scalars(self): multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider(", "nothing to test for wall time, as it can't be", "6 + 2) self.assertEqual(sample.description, \"bottom-left\") 
self.assertEqual(sample.display_name, \"\") with self.subTest(\"filters by", "result = provider.read_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, downsample=4, ) self.assertItemsEqual(result.keys(), [\"mondrian\"])", "out self.assertEqual(sample.plugin_content, b\"\") self.assertEqual( sample.display_name, \"\" ) # not written", "event) in zip(result[run][tag], tensor_events): self.assertEqual(datum.step, event.step) self.assertEqual(datum.wall_time, event.wall_time) self.assertEqual( datum.value,", "we provide a fake multiplexer instead. start_times = { \"second_2\":", "tensorboard.plugins.scalar import metadata as scalar_metadata from tensorboard.plugins.scalar import summary_v2 as", "import ( plugin_event_multiplexer as event_multiplexer, ) from tensorboard.compat.proto import summary_pb2", "self.assertEqual(datum.step, event.step) self.assertEqual(datum.wall_time, event.wall_time) np.testing.assert_equal( datum.numpy, tensor_util.make_ndarray(event.tensor_proto), ) def test_read_tensors_downsamples(self):", "self.assertLen(last.values, 2 + 2) blobs = [ provider.read_blob(self.ctx, blob_key=v.blob_key) for", "def test_read_scalars_downsamples(self): # TODO(@wchargin): Verify that this always includes the", "run_tag_filter=base_provider.RunTagFilter([\"waves\"], [\"square\"]), ) self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result = provider.list_scalars(", "(0.0, 0.25, 0.5, 0.75, 1.0), \"uniform\"), (\"very smoothn't\", (0.0, 0.01,", "a fake multiplexer instead. 
start_times = { \"second_2\": 2.0, \"first\":", "plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"waves\"], [\"square\"]), ) self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result =", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "result[\"mondrian\"].keys(), [\"red\", \"blue\", \"yellow\"] ) sample = result[\"mondrian\"][\"blue\"] self.assertIsInstance(sample, base_provider.BlobSequenceTimeSeries)", "(datum, event) in zip(result[run][tag], tensor_events): self.assertEqual(datum.step, event.step) self.assertEqual(datum.wall_time, event.wall_time) self.assertEqual(", "mocked out self.assertEqual(sample.plugin_content, b\"\") self.assertEqual( sample.display_name, \"\" ) # not", ") # not written by V2 summary ops self.assertEqual(sample.description, \"very", ") self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\"]) def test_read_tensors(self): multiplexer = self.create_multiplexer()", "for a plugin\"): result = provider.read_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, downsample=4,", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "def test_list_plugins_with_graph(self): with tf.compat.v1.Graph().as_default() as graph: writer = tf.compat.v1.summary.FileWriter(self.logdir) writer.add_graph(graph)", "self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"sine\", \"square\"]) result = provider.list_scalars( self.ctx, experiment_id=\"unused\",", "function.\"\"\" def test_deterministic(self): xs = \"abcdefg\" expected = data_provider._downsample(xs, k=4)", "result[\"mondrian\"][\"yellow\"][0], base_provider.BlobSequenceDatum, ) class DownsampleTest(tf.test.TestCase): \"\"\"Tests for 
the `_downsample` private", "def test_deterministic(self): xs = \"abcdefg\" expected = data_provider._downsample(xs, k=4) for", ") run_tag_filter = base_provider.RunTagFilter( runs=[\"lebesgue\"], tags=[\"uniform\", \"bimodal\"], ) result =", "self, ValueError, \"can only convert an array of size 1", "language governing permissions and # limitations under the License. #", "[\"uniform\"]) def test_read_tensors(self): multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer,", "self.assertEqual(datum.wall_time, event.wall_time) np.testing.assert_equal( datum.numpy, tensor_util.make_ndarray(event.tensor_proto), ) def test_read_tensors_downsamples(self): multiplexer =", "required by applicable law or agreed to in writing, software", "= provider.data_location(self.ctx, experiment_id=\"unused\") self.assertEqual(result, self.logdir) def test_list_plugins_with_no_graph(self): provider = self.create_provider()", "data_provider._downsample(xs, k=4) for _ in range(100): actual = data_provider._downsample(xs, k=4)", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "# manually reading the tfrecords, modifying the data, and writing", "image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ], ) def test_list_plugins_with_graph(self): with tf.compat.v1.Graph().as_default() as graph:", "writing # them back out), so we provide a fake", "= self.create_provider() with self.subTest(\"reads all time series for a plugin\"):", "(datum, event) in zip(result[run][tag], tensor_events): self.assertEqual(datum.step, event.step) self.assertEqual(datum.wall_time, event.wall_time) np.testing.assert_equal(", "0.75, 1.0), \"uniform\"), (\"very smoothn't\", (0.0, 0.01, 0.99, 1.0), \"bimodal\"),", ") self.assertEqual(result, {}) def test_read_scalars(self): multiplexer = self.create_multiplexer() provider =", "[\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\"]) def test_read_tensors(self): multiplexer = 
self.create_multiplexer() provider =", "tag in result[run]: tensor_events = multiplexer.Tensors(run, tag) self.assertLen(result[run][tag], len(tensor_events)) for", "agreed to in writing, software # distributed under the License", "distributed under the License is distributed on an \"AS IS\"", "description=\"boxen\" ) scalar_summary.scalar(\"cube\", i ** 3, step=3 * i) logdir", "2) self.assertEqual(sample.description, \"bottom-left\") self.assertEqual(sample.display_name, \"\") with self.subTest(\"filters by run/tag\"): result", "= self.create_provider() result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"waves\"], [\"square\"]),", "= provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( tags=[\"square\", \"quartic\"] ), )", ") def test_read_tensors_downsamples(self): multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer,", "2) blobs = [ provider.read_blob(self.ctx, blob_key=v.blob_key) for v in last.values", "always includes the most # recent datum, as specified by", "result[run]: tensor_events = multiplexer.Tensors(run, tag) self.assertLen(result[run][tag], len(tensor_events)) for (datum, event)", "time series for a plugin\"): result = provider.read_blob_sequences( self.ctx, experiment_id=\"unused\",", "import numpy as np from tensorboard import context from tensorboard.backend.event_processing", "No explicit checks yet. 
with six.assertRaisesRegex( self, ValueError, \"can only", "= provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"un\"], [\"likely\"]), ) self.assertEqual(result, {})", "base_provider.ScalarTimeSeries) self.assertEqual(sample.max_step, 18) # nothing to test for wall time,", "in range(100): actual = data_provider._downsample(xs, k=4) self.assertEqual(actual, expected) def test_underlong_ok(self):", "\"square\"]) result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"un\"], [\"likely\"]), )", "data: image_1x1 = tf.constant([[[color]]], dtype=tf.uint8) for i in xrange(1, 11):", "as base_provider from tensorboard.plugins.graph import metadata as graph_metadata from tensorboard.plugins.histogram", "\"no_time\", \"second_1\"] self.assertItemsEqual(result, start_times) return result def FirstEventTimestamp(multiplexer, run): self.assertIn(run,", "description=description ) logdir = os.path.join(self.logdir, \"mondrian\") with tf.summary.create_file_writer(logdir).as_default(): data =", "xrange # pylint: disable=redefined-builtin import numpy as np from tensorboard", "\"picasso\"], tags=[\"yellow\", \"green't\"] ), downsample=1, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"])", "= provider.list_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( [\"lebesgue\"], [\"uniform\"] ), )", "= [ (\"very smooth\", (0.0, 0.25, 0.5, 0.75, 1.0), \"uniform\"),", "\"polynomials\", \"unicorns\"], tags=[\"sine\", \"square\", \"cube\", \"iridescence\"], ) result = provider.read_scalars(", "* i) logdir = os.path.join(self.logdir, \"waves\") with tf.summary.create_file_writer(logdir).as_default(): for i", "of events written to disk (without # manually 
reading the", "DownsampleTest(tf.test.TestCase): \"\"\"Tests for the `_downsample` private helper function.\"\"\" def test_deterministic(self):", "10) # nothing to test for wall time, as it", "histogram_summary from tensorboard.plugins.scalar import metadata as scalar_metadata from tensorboard.plugins.scalar import", "OR CONDITIONS OF ANY KIND, either express or implied. #", "multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) result", "self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=None, ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"])", "the License is distributed on an \"AS IS\" BASIS, #", "[\"square\"]) result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( tags=[\"square\", \"quartic\"]", "FirstEventTimestamp(multiplexer, run): self.assertIn(run, start_times) result = start_times[run] if result is", "tf.summary.write( \"bad\", tensor=[i, i], step=i, metadata=metadata ) logdir = os.path.join(self.logdir,", "\"\" ) # not written by V2 summary ops self.assertEqual(sample.description,", "provide a fake multiplexer instead. 
start_times = { \"second_2\": 2.0,", "[\"square\"]) result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter(runs=[\"waves\", \"hugs\"]), )", "self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"]) for run in result: for tag in", "self.subTest(\"filters by run/tag\"): result = provider.read_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter(", "= data_provider._downsample(xs, k=100) self.assertEqual(actual, sorted(actual)) def test_zero(self): xs = \"abcdefg\"", ".., 6, .., 2 # a `k`-sample image summary of", "), ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\"]) def test_read_tensors(self): multiplexer =", "`_downsample` private helper function.\"\"\" def test_deterministic(self): xs = \"abcdefg\" expected", "calculation. 
k = 6 - abs(6 - i) # 1,", "= self.create_provider() result = provider.data_location(self.ctx, experiment_id=\"unused\") self.assertEqual(result, self.logdir) def test_list_plugins_with_no_graph(self):", "Runs(multiplexer): result = [\"second_2\", \"first\", \"no_time\", \"second_1\"] self.assertItemsEqual(result, start_times) return", "law or agreed to in writing, software # distributed under", "from tensorboard.plugins.graph import metadata as graph_metadata from tensorboard.plugins.histogram import metadata", "series for a plugin\"): result = provider.read_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME,", "+ 2) blobs = [ provider.read_blob(self.ctx, blob_key=v.blob_key) for v in", "Quick check only, as scalars and tensors use the same", "runs=[\"waves\", \"polynomials\", \"unicorns\"], tags=[\"sine\", \"square\", \"cube\", \"iridescence\"], ) result =", "tests for `tensorboard.backend.event_processing.data_provider`.\"\"\" from __future__ import absolute_import from __future__ import", "0.01, 0.99, 1.0), \"bimodal\"), ] for (description, distribution, name) in", "provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) run_tag_filter = base_provider.RunTagFilter( runs=[\"waves\",", "same underlying # filtering implementation. 
result = provider.list_tensors( self.ctx, experiment_id=\"unused\",", "this always includes the most # recent datum, as specified", "= [ provider.read_blob(self.ctx, blob_key=v.blob_key) for v in last.values ] self.assertEqual(blobs[0],", "self.logdir) def test_data_location(self): provider = self.create_provider() result = provider.data_location(self.ctx, experiment_id=\"unused\")", "data: tensor = tf.constant([distribution], dtype=tf.float64) for i in xrange(1, 11):", "= provider.list_plugins(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result, [ \"greetings\", \"marigraphs\", histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME,", "[\"uniform\", \"bimodal\"]) sample = result[\"lebesgue\"][\"uniform\"] self.assertIsInstance(sample, base_provider.TensorTimeSeries) self.assertEqual(sample.max_step, 10) #", "may obtain a copy of the License at # #", "self.ctx, blob_key=result[\"mondrian\"][\"red\"][-1].values[2].blob_key, ) self.assertEqual(blue1, blue2) self.assertNotEqual(blue1, red1) with self.subTest(\"filters by", "= provider.list_plugins(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result, [ \"greetings\", \"marigraphs\", graph_metadata.PLUGIN_NAME, histogram_metadata.PLUGIN_NAME,", "tags=[\"yellow\", \"green't\"] ), ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"],", "0.5, 0.75, 1.0), \"uniform\"), (\"very smoothn't\", (0.0, 0.01, 0.99, 1.0),", "expected = list(\"abcdefg\") self.assertIsNot(actual, xs) self.assertEqual(actual, expected) def test_inorder(self): xs", "] for (description, distribution, name) in data: tensor = tf.constant([distribution],", "may not use this file except in compliance with the", "image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ], ) def test_list_runs(self): # We can't control", "red1 = provider.read_blob( self.ctx, 
blob_key=result[\"mondrian\"][\"red\"][-1].values[2].blob_key, ) self.assertEqual(blue1, blue2) self.assertNotEqual(blue1, red1)", "\"first\": 1.5, \"no_time\": None, \"second_1\": 2.0, } class FakeMultiplexer(object): def", "provider = self.create_provider() result = provider.list_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=None,", ") result = provider.read_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100, )", "this file except in compliance with the License. # You", "= data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) run_tag_filter = base_provider.RunTagFilter( runs=[\"lebesgue\"], tags=[\"uniform\",", "numpy as np from tensorboard import context from tensorboard.backend.event_processing import", "out), so we provide a fake multiplexer instead. start_times =", "= provider.list_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\", \"green't\"]", "list(range(10000)) actual = data_provider._downsample(xs, k=100) self.assertEqual(actual, sorted(actual)) def test_zero(self): xs", "# # Licensed under the Apache License, Version 2.0 (the", "= multiplexer.Tensors(run, tag) self.assertLen(result[run][tag], len(tensor_events)) for (datum, event) in zip(result[run][tag],", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "be mocked out self.assertEqual(sample.plugin_content, b\"\") self.assertEqual(sample.max_length, 6 + 2) self.assertEqual(sample.description,", "self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\", \"green't\"] ), downsample=1,", "event.wall_time) self.assertEqual( datum.value, tensor_util.make_ndarray(event.tensor_proto).item(), ) def 
test_read_scalars_downsamples(self): # TODO(@wchargin): Verify", "as scalars and tensors use the same underlying # filtering", "[\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"]) for run", ") for (run, start_time) in six.iteritems(start_times) ], ) def test_list_scalars_all(self):", "histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ], ) def test_list_plugins_with_graph(self): with tf.compat.v1.Graph().as_default() as", "[\"red\", \"blue\", \"yellow\"] ) sample = result[\"mondrian\"][\"blue\"] self.assertIsInstance(sample, base_provider.BlobSequenceTimeSeries) self.assertEqual(sample.max_step,", "pylint: disable=redefined-builtin import numpy as np from tensorboard import context", "from tensorboard.backend.event_processing import ( plugin_event_multiplexer as event_multiplexer, ) from tensorboard.compat.proto", "self.assertEqual(sample.display_name, \"\") with self.subTest(\"filters by run/tag\"): result = provider.list_blob_sequences( self.ctx,", "= tf.constant([[[color]]], dtype=tf.uint8) for i in xrange(1, 11): # Use", "test_list_blob_sequences(self): provider = self.create_provider() with self.subTest(\"finds all time series for", ") def test_list_scalars_all(self): provider = self.create_provider() result = provider.list_scalars( self.ctx,", "metadata.plugin_data.plugin_name = \"greetings\" metadata.data_class = summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"bad\", tensor=[i, i],", "\"greetings\" metadata.data_class = summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"bad\", tensor=[i, i], step=i, metadata=metadata", "3) def test_read_scalars_but_not_rank_0(self): provider = self.create_provider() run_tag_filter = base_provider.RunTagFilter([\"waves\"], [\"bad\"])", "provider.list_plugins(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result, [ \"greetings\", \"marigraphs\", 
graph_metadata.PLUGIN_NAME, histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME,", "base_provider.RunTagFilter( runs=[\"waves\", \"polynomials\", \"unicorns\"], tags=[\"sine\", \"square\", \"cube\", \"iridescence\"], ) result", "experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100, ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\",", "experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=None, ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"]) sample", "\"yellow\"] ) sample = result[\"mondrian\"][\"blue\"] self.assertIsInstance(sample, base_provider.BlobSequenceTimeSeries) self.assertEqual(sample.max_step, 10) #", "downsample=100, ) def test_list_tensors_all(self): provider = self.create_provider() result = provider.list_tensors(", "summary_v2 as scalar_summary from tensorboard.plugins.image import metadata as image_metadata from", "or implied. # See the License for the specific language", "written by V2 summary ops self.assertEqual(sample.description, \"very smooth\") def test_list_tensors_filters(self):", "with rank-1 data of scalar data class (bad!). 
metadata =", "result = provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100, ) self.assertItemsEqual(result.keys(),", "result = provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, downsample=3, ) self.assertLen(result[\"waves\"][\"sine\"], 3)", "= os.path.join(self.logdir, \"waves\") with tf.summary.create_file_writer(logdir).as_default(): for i in xrange(10): scalar_summary.scalar(\"sine\",", "self.assertIsNot(actual, xs) self.assertEqual(actual, expected) def test_inorder(self): xs = list(range(10000)) actual", "sample = result[\"mondrian\"][\"blue\"] self.assertLen(sample, 4) # downsampled from 10 last", "tensors use the same underlying # filtering implementation. result =", "from tensorboard.plugins.image import metadata as image_metadata from tensorboard.plugins.image import summary_v2", "summary ops self.assertEqual(sample.description, \"boxen\") def test_list_scalars_filters(self): provider = self.create_provider() result", "self.create_multiplexer() return data_provider.MultiplexerDataProvider(multiplexer, self.logdir) def test_data_location(self): provider = self.create_provider() result", "back out), so we provide a fake multiplexer instead. 
start_times", "run_tag_filter=base_provider.RunTagFilter(runs=[\"waves\", \"hugs\"]), ) self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"sine\", \"square\"]) result =", "provider.list_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual( result[\"mondrian\"].keys(), [\"red\",", "def test_list_runs(self): # We can't control the timestamps of events", "= tf.compat.v1.summary.FileWriter(self.logdir) writer.add_graph(graph) writer.flush() provider = self.create_provider() result = provider.list_plugins(self.ctx,", "\"\"\"Tests for the `_downsample` private helper function.\"\"\" def test_deterministic(self): xs", "provider.list_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=None, ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\",", "disable=redefined-builtin import numpy as np from tensorboard import context from", "= result[\"mondrian\"][\"blue\"] self.assertLen(sample, 4) # downsampled from 10 last =", "k = 6 - abs(6 - i) # 1, ..,", "test_data_location(self): provider = self.create_provider() result = provider.data_location(self.ctx, experiment_id=\"unused\") self.assertEqual(result, self.logdir)", "= provider.read_blob( self.ctx, blob_key=result[\"mondrian\"][\"red\"][-1].values[2].blob_key, ) self.assertEqual(blue1, blue2) self.assertNotEqual(blue1, red1) with", "event timestep could be found\") else: return result multiplexer =", "self.create_provider() result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"waves\"], [\"square\"]), )", "self.assertEqual(blue1, blue2) self.assertNotEqual(blue1, red1) with self.subTest(\"filters by run/tag\"): result =", 
"`tensorboard.backend.event_processing.data_provider`.\"\"\" from __future__ import absolute_import from __future__ import division from", "event.step) self.assertEqual(datum.wall_time, event.wall_time) np.testing.assert_equal( datum.numpy, tensor_util.make_ndarray(event.tensor_proto), ) def test_read_tensors_downsamples(self): multiplexer", "so we provide a fake multiplexer instead. start_times = {", "self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"]) sample = result[\"lebesgue\"][\"uniform\"] self.assertIsInstance(sample, base_provider.TensorTimeSeries)", "self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( tags=[\"square\", \"quartic\"] ), ) self.assertItemsEqual(result.keys(), [\"polynomials\",", "for i in xrange(10): scalar_summary.scalar(\"sine\", tf.sin(float(i)), step=i) scalar_summary.scalar( \"square\", tf.sign(tf.sin(float(i))),", "self.assertStartsWith(blobs[2], b\"\\x89PNG\") self.assertStartsWith(blobs[3], b\"\\x89PNG\") blue1 = blobs[2] blue2 = blobs[3]", "= self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) run_tag_filter =", "= provider.list_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=None, ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(),", "written by V2 summary ops self.assertEqual(sample.description, \"boxen\") def test_list_scalars_filters(self): provider", "data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) run_tag_filter = base_provider.RunTagFilter( runs=[\"lebesgue\"], tags=[\"uniform\", \"bimodal\"],", "in zip(result[run][tag], tensor_events): self.assertEqual(datum.step, event.step) self.assertEqual(datum.wall_time, event.wall_time) np.testing.assert_equal( datum.numpy, 
tensor_util.make_ndarray(event.tensor_proto),", "i, 1]) image_summary.image( name, image, step=i, description=description, max_outputs=99, ) def", "tf1.enable_eager_execution() class MultiplexerDataProviderTest(tf.test.TestCase): def setUp(self): super(MultiplexerDataProviderTest, self).setUp() self.logdir = self.get_temp_dir()", "run): self.assertIn(run, start_times) result = start_times[run] if result is None:", "[\"red\", \"blue\", \"yellow\"] ) sample = result[\"mondrian\"][\"blue\"] self.assertLen(sample, 4) #", "= provider.list_runs(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result, [ base_provider.Run( run_id=run, run_name=run, start_time=start_time", "provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"un\"], [\"likely\"]), ) self.assertEqual(result, {}) def", "i in xrange(1, 11): # Use a non-monotonic sequence of", "def test_underlong_ok(self): xs = list(\"abcdefg\") actual = data_provider._downsample(xs, k=10) expected", "return result def FirstEventTimestamp(multiplexer, run): self.assertIn(run, start_times) result = start_times[run]", "# filtering implementation. result = provider.list_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter(", "run_tag_filter=run_tag_filter, downsample=100, ) def test_list_tensors_all(self): provider = self.create_provider() result =", "self.logdir = self.get_temp_dir() self.ctx = context.RequestContext() logdir = os.path.join(self.logdir, \"polynomials\")", "rank-1 data of scalar data class (bad!). 
metadata = summary_pb2.SummaryMetadata()", "with tf.summary.create_file_writer(logdir).as_default(): for i in xrange(10): scalar_summary.scalar(\"sine\", tf.sin(float(i)), step=i) scalar_summary.scalar(", "summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"high_tide\", tensor=i, step=i, metadata=metadata ) # Summary with", "tags=[\"sine\", \"square\", \"cube\", \"iridescence\"], ) result = provider.read_scalars( self.ctx, experiment_id=\"unused\",", "histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ], ) def test_list_runs(self): # We can't", "result, [ \"greetings\", \"marigraphs\", histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ], ) def", "run_tag_filter = base_provider.RunTagFilter([\"waves\"], [\"bad\"]) # No explicit checks yet. with", "self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result = provider.list_scalars(", ") self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"])", "scalar\", ): provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=\"greetings\", run_tag_filter=run_tag_filter, downsample=100, ) def", ") # not written by V2 summary ops self.assertEqual(sample.description, \"boxen\")", "blobs[3] red1 = provider.read_blob( self.ctx, blob_key=result[\"mondrian\"][\"red\"][-1].values[2].blob_key, ) self.assertEqual(blue1, blue2) self.assertNotEqual(blue1,", "def test_read_blob_sequences_and_read_blob(self): provider = self.create_provider() with self.subTest(\"reads all time series", "in writing, software # distributed under the License is distributed", "= data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) result = provider.read_tensors( 
self.ctx, experiment_id=\"unused\",", "result = provider.read_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\",", "from __future__ import division from __future__ import print_function import os", "None, \"second_1\": 2.0, } class FakeMultiplexer(object): def Runs(multiplexer): result =", ") self.assertLen(result[\"waves\"][\"sine\"], 3) def test_read_scalars_but_not_rank_0(self): provider = self.create_provider() run_tag_filter =", "result, [ \"greetings\", \"marigraphs\", graph_metadata.PLUGIN_NAME, histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ], )", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=None, ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(),", "experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, downsample=3, ) self.assertLen(result[\"lebesgue\"][\"uniform\"], 3) def test_list_blob_sequences(self): provider =", "= \"abcdefg\" actual = data_provider._downsample(xs, k=0) self.assertEqual(actual, []) if __name__", "= { \"second_2\": 2.0, \"first\": 1.5, \"no_time\": None, \"second_1\": 2.0,", "run/tag\"): result = provider.list_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"],", "def Runs(multiplexer): result = [\"second_2\", \"first\", \"no_time\", \"second_1\"] self.assertItemsEqual(result, start_times)", "self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"]) for run in result: for tag in", "of sample sizes to # test `max_length` calculation. 
k =", "the License for the specific language governing permissions and #", "= data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) run_tag_filter = base_provider.RunTagFilter( runs=[\"waves\", \"polynomials\",", "= self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) result =", "Python scalar\", ): provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=\"greetings\", run_tag_filter=run_tag_filter, downsample=100, )", "self.create_provider() # Quick check only, as scalars and tensors use", "result[\"mondrian\"].keys(), [\"red\", \"blue\", \"yellow\"] ) sample = result[\"mondrian\"][\"blue\"] self.assertLen(sample, 4)", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "base_provider.BlobSequenceDatum) self.assertEqual(last.step, 10) self.assertLen(last.values, 2 + 2) blobs = [", "last = sample[-1] self.assertIsInstance(last, base_provider.BlobSequenceDatum) self.assertEqual(last.step, 10) self.assertLen(last.values, 2 +", "# them back out), so we provide a fake multiplexer", "[ \"greetings\", \"marigraphs\", graph_metadata.PLUGIN_NAME, histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ], ) def", "blue2) self.assertNotEqual(blue1, red1) with self.subTest(\"filters by run/tag\"): result = provider.read_blob_sequences(", "summary ops self.assertEqual(sample.description, \"very smooth\") def test_list_tensors_filters(self): provider = self.create_provider()", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "image_summary.image( name, image, step=i, description=description, max_outputs=99, ) def create_multiplexer(self): multiplexer", "111), \"bottom-right\"), ] for (name, color, description) in data: image_1x1", "by run/tag\"): result = provider.list_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( 
runs=[\"mondrian\",", "[\"square\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter(runs=[\"waves\",", "# TODO(@wchargin): Verify that this always includes the most #", "plugin\"): result = provider.list_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, ) self.assertItemsEqual(result.keys(), [\"mondrian\"])", "[ base_provider.Run( run_id=run, run_name=run, start_time=start_time ) for (run, start_time) in", "# distributed under the License is distributed on an \"AS", "os.path.join(self.logdir, \"mondrian\") with tf.summary.create_file_writer(logdir).as_default(): data = [ (\"red\", (221, 28,", "# Unless required by applicable law or agreed to in", "result = [\"second_2\", \"first\", \"no_time\", \"second_1\"] self.assertItemsEqual(result, start_times) return result", "= self.create_provider() result = provider.list_plugins(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result, [ \"greetings\",", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "sample = result[\"polynomials\"][\"square\"] self.assertIsInstance(sample, base_provider.ScalarTimeSeries) self.assertEqual(sample.max_step, 18) # nothing to", "# 1, .., 6, .., 2 # a `k`-sample image", "[\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"]) sample =", "Copyright 2019 The TensorFlow Authors. All Rights Reserved. # #", "instead. 
start_times = { \"second_2\": 2.0, \"first\": 1.5, \"no_time\": None,", "[\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"], base_provider.BlobSequenceTimeSeries, ) def test_read_blob_sequences_and_read_blob(self): provider", "self.assertEqual(blobs[0], b\"10\") self.assertEqual(blobs[1], b\"10\") self.assertStartsWith(blobs[2], b\"\\x89PNG\") self.assertStartsWith(blobs[3], b\"\\x89PNG\") blue1 =", "the Apache License, Version 2.0 (the \"License\"); # you may", "step=i) scalar_summary.scalar( \"square\", tf.sign(tf.sin(float(i))), step=i ) # Summary with rank-0", "checks yet. with six.assertRaisesRegex( self, ValueError, \"can only convert an", "step=3 * i) logdir = os.path.join(self.logdir, \"waves\") with tf.summary.create_file_writer(logdir).as_default(): for", ") def test_list_tensors_all(self): provider = self.create_provider() result = provider.list_tensors( self.ctx,", "interface. multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir )", ") result = provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100, )", "experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter(runs=[\"waves\", \"hugs\"]), ) self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"sine\", \"square\"])", "images image = tf.tile(image_1x1, [k, i, i, 1]) image_summary.image( name,", "provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) result = provider.read_scalars( self.ctx,", "result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"un\"], [\"likely\"]), ) self.assertEqual(result,", "experiment_id=\"unused\") self.assertItemsEqual( result, 
[ \"greetings\", \"marigraphs\", histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ],", "[\"square\", \"sine\"]) for run in result: for tag in result[run]:", "# Summary with rank-1 data of scalar data class (bad!).", "import data_provider from tensorboard.backend.event_processing import ( plugin_event_multiplexer as event_multiplexer, )", "as histogram_summary from tensorboard.plugins.scalar import metadata as scalar_metadata from tensorboard.plugins.scalar", "data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) run_tag_filter = base_provider.RunTagFilter( runs=[\"waves\", \"polynomials\", \"unicorns\"],", "[\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result = provider.list_scalars( self.ctx,", "= self.create_multiplexer() return data_provider.MultiplexerDataProvider(multiplexer, self.logdir) def test_data_location(self): provider = self.create_provider()", "scalar_summary.scalar( \"square\", i ** 2, step=2 * i, description=\"boxen\" )", "tensor_util import tensorflow.compat.v1 as tf1 import tensorflow.compat.v2 as tf tf1.enable_eager_execution()", "= base_provider.RunTagFilter( runs=[\"waves\", \"polynomials\", \"unicorns\"], tags=[\"sine\", \"square\", \"cube\", \"iridescence\"], )", "tf.summary.create_file_writer(logdir).as_default(): data = [ (\"very smooth\", (0.0, 0.25, 0.5, 0.75,", "self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"][0], base_provider.BlobSequenceDatum, ) class DownsampleTest(tf.test.TestCase): \"\"\"Tests for", "be found\") else: return result multiplexer = FakeMultiplexer() provider =", ") self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result =", "summary_v2 as 
histogram_summary from tensorboard.plugins.scalar import metadata as scalar_metadata from", "that this always includes the most # recent datum, as", "the data, and writing # them back out), so we", "be mocked out self.assertEqual(sample.plugin_content, b\"\") self.assertEqual( sample.display_name, \"\" ) #", "convert an array of size 1 to a Python scalar\",", "self.assertIsInstance( result[\"mondrian\"][\"yellow\"][0], base_provider.BlobSequenceDatum, ) class DownsampleTest(tf.test.TestCase): \"\"\"Tests for the `_downsample`", "# We can't control the timestamps of events written to", "\"green't\"] ), ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"], base_provider.BlobSequenceTimeSeries,", "run/tag\"): result = provider.read_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"],", "self.assertEqual(sample.description, \"very smooth\") def test_list_tensors_filters(self): provider = self.create_provider() # Quick", "downsample=3, ) self.assertLen(result[\"waves\"][\"sine\"], 3) def test_read_scalars_but_not_rank_0(self): provider = self.create_provider() run_tag_filter", "xs = \"abcdefg\" actual = data_provider._downsample(xs, k=0) self.assertEqual(actual, []) if", "\"mondrian\") with tf.summary.create_file_writer(logdir).as_default(): data = [ (\"red\", (221, 28, 38),", "\"first\", \"no_time\", \"second_1\"] self.assertItemsEqual(result, start_times) return result def FirstEventTimestamp(multiplexer, run):", ") self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"], base_provider.BlobSequenceTimeSeries, ) def", "range(100): actual = data_provider._downsample(xs, k=4) self.assertEqual(actual, expected) def 
test_underlong_ok(self): xs", "11): histogram_summary.histogram( name, tensor * i, step=i, description=description ) logdir", "plugin_name=image_metadata.PLUGIN_NAME, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual( result[\"mondrian\"].keys(), [\"red\", \"blue\", \"yellow\"] )", "under the License is distributed on an \"AS IS\" BASIS,", "helper function.\"\"\" def test_deterministic(self): xs = \"abcdefg\" expected = data_provider._downsample(xs,", "events written to disk (without # manually reading the tfrecords,", "downsample=100, ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"]) for run in", "def test_list_plugins_with_no_graph(self): provider = self.create_provider() result = provider.list_plugins(self.ctx, experiment_id=\"unused\") self.assertItemsEqual(", "plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( tags=[\"square\", \"quartic\"] ), ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(),", "writer.add_graph(graph) writer.flush() provider = self.create_provider() result = provider.list_plugins(self.ctx, experiment_id=\"unused\") self.assertItemsEqual(", "the License. 
# ============================================================================== \"\"\"Unit tests for `tensorboard.backend.event_processing.data_provider`.\"\"\" from __future__", "experiment_id=\"unused\", plugin_name=\"greetings\", run_tag_filter=run_tag_filter, downsample=100, ) def test_list_tensors_all(self): provider = self.create_provider()", "[\"likely\"]), ) self.assertEqual(result, {}) def test_read_scalars(self): multiplexer = self.create_multiplexer() provider", "plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( [\"lebesgue\"], [\"uniform\"] ), ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\"])", "= provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100, ) self.assertItemsEqual(result.keys(), [\"polynomials\",", "def create_multiplexer(self): multiplexer = event_multiplexer.EventMultiplexer() multiplexer.AddRunsFromDirectory(self.logdir) multiplexer.Reload() return multiplexer def", "metadata as histogram_metadata from tensorboard.plugins.histogram import summary_v2 as histogram_summary from", "image_summary from tensorboard.util import tensor_util import tensorflow.compat.v1 as tf1 import", "from __future__ import absolute_import from __future__ import division from __future__", "= provider.list_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual( result[\"mondrian\"].keys(),", "= sample[-1] self.assertIsInstance(last, base_provider.BlobSequenceDatum) self.assertEqual(last.step, 10) self.assertLen(last.values, 2 + 2)", "the timestamps of events written to disk (without # manually", "None: raise ValueError(\"No event timestep could be found\") else: return", "[\"lebesgue\"], [\"uniform\"] ), ) self.assertItemsEqual(result.keys(), 
[\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\"]) def test_read_tensors(self):", "result = provider.list_runs(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result, [ base_provider.Run( run_id=run, run_name=run,", "six.iteritems(start_times) ], ) def test_list_scalars_all(self): provider = self.create_provider() result =", "class (bad!). metadata = summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name = \"greetings\" metadata.data_class =", "91, 158), \"bottom-left\"), (\"yellow\", (239, 220, 111), \"bottom-right\"), ] for", "] for (name, color, description) in data: image_1x1 = tf.constant([[[color]]],", "2.0, } class FakeMultiplexer(object): def Runs(multiplexer): result = [\"second_2\", \"first\",", "self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100, ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\",", "start_time) in six.iteritems(start_times) ], ) def test_list_scalars_all(self): provider = self.create_provider()", "only convert an array of size 1 to a Python", "[\"uniform\"] ), ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\"]) def test_read_tensors(self): multiplexer", "k=4) for _ in range(100): actual = data_provider._downsample(xs, k=4) self.assertEqual(actual,", "base_provider.RunTagFilter([\"waves\"], [\"bad\"]) # No explicit checks yet. with six.assertRaisesRegex( self,", "ANY KIND, either express or implied. # See the License", "the License. 
# You may obtain a copy of the", "metadata as scalar_metadata from tensorboard.plugins.scalar import summary_v2 as scalar_summary from", "logdir = os.path.join(self.logdir, \"lebesgue\") with tf.summary.create_file_writer(logdir).as_default(): data = [ (\"very", "self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\"]) def test_read_tensors(self): multiplexer = self.create_multiplexer() provider", "# See the License for the specific language governing permissions", "self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, downsample=3, ) self.assertLen(result[\"waves\"][\"sine\"], 3) def test_read_scalars_but_not_rank_0(self): provider", "plugin\"): result = provider.read_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, downsample=4, ) self.assertItemsEqual(result.keys(),", "\"waves\") with tf.summary.create_file_writer(logdir).as_default(): for i in xrange(10): scalar_summary.scalar(\"sine\", tf.sin(float(i)), step=i)", "= provider.read_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, downsample=3, ) self.assertLen(result[\"lebesgue\"][\"uniform\"], 3) def", "self.assertNotEqual(blue1, red1) with self.subTest(\"filters by run/tag\"): result = provider.read_blob_sequences( self.ctx,", "provider.read_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, downsample=4, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual( result[\"mondrian\"].keys(),", "self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) result = provider.read_tensors(", "writer = tf.compat.v1.summary.FileWriter(self.logdir) writer.add_graph(graph) writer.flush() provider = self.create_provider() result =", "zip(result[run][tag], tensor_events): self.assertEqual(datum.step, event.step) self.assertEqual(datum.wall_time, 
event.wall_time) self.assertEqual( datum.value, tensor_util.make_ndarray(event.tensor_proto).item(), )", "experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100, ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"])", "in last.values ] self.assertEqual(blobs[0], b\"10\") self.assertEqual(blobs[1], b\"10\") self.assertStartsWith(blobs[2], b\"\\x89PNG\") self.assertStartsWith(blobs[3],", "for _ in range(100): actual = data_provider._downsample(xs, k=4) self.assertEqual(actual, expected)", "data_provider._downsample(xs, k=10) expected = list(\"abcdefg\") self.assertIsNot(actual, xs) self.assertEqual(actual, expected) def", "= summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"bad\", tensor=[i, i], step=i, metadata=metadata ) logdir", "b\"10\") self.assertStartsWith(blobs[2], b\"\\x89PNG\") self.assertStartsWith(blobs[3], b\"\\x89PNG\") blue1 = blobs[2] blue2 =", "scalar_summary.scalar(\"sine\", tf.sin(float(i)), step=i) scalar_summary.scalar( \"square\", tf.sign(tf.sin(float(i))), step=i ) # Summary", "run_tag_filter=base_provider.RunTagFilter( tags=[\"square\", \"quartic\"] ), ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\"])", "2 # a `k`-sample image summary of `i`-by-`i` images image", "self.assertEqual(sample.description, \"boxen\") def test_list_scalars_filters(self): provider = self.create_provider() result = provider.list_scalars(", "self.assertEqual(last.step, 10) self.assertLen(last.values, 2 + 2) blobs = [ provider.read_blob(self.ctx,", "self.assertItemsEqual( result[\"mondrian\"].keys(), [\"red\", \"blue\", \"yellow\"] ) sample = result[\"mondrian\"][\"blue\"] self.assertIsInstance(sample,", "only, as scalars and tensors use the same underlying #", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", ") 
self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result = provider.list_scalars( self.ctx, experiment_id=\"unused\",", "writing, software # distributed under the License is distributed on", "= \"marigraphs\" metadata.data_class = summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"high_tide\", tensor=i, step=i, metadata=metadata", "tags=[\"yellow\", \"green't\"] ), downsample=1, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance(", "for (name, color, description) in data: image_1x1 = tf.constant([[[color]]], dtype=tf.uint8)", "self.assertEqual(result, self.logdir) def test_list_plugins_with_no_graph(self): provider = self.create_provider() result = provider.list_plugins(self.ctx,", "import provider as base_provider from tensorboard.plugins.graph import metadata as graph_metadata", "result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter(runs=[\"waves\", \"hugs\"]), ) self.assertItemsEqual(result.keys(),", "= result[\"lebesgue\"][\"uniform\"] self.assertIsInstance(sample, base_provider.TensorTimeSeries) self.assertEqual(sample.max_step, 10) # nothing to test", "= blobs[3] red1 = provider.read_blob( self.ctx, blob_key=result[\"mondrian\"][\"red\"][-1].values[2].blob_key, ) self.assertEqual(blue1, blue2)", "experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=None, ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"])", "self.get_temp_dir() self.ctx = context.RequestContext() logdir = os.path.join(self.logdir, \"polynomials\") with tf.summary.create_file_writer(logdir).as_default():", "actual = data_provider._downsample(xs, k=10) expected = list(\"abcdefg\") self.assertIsNot(actual, xs) 
self.assertEqual(actual,", "data_provider.MultiplexerDataProvider(multiplexer, self.logdir) def test_data_location(self): provider = self.create_provider() result = provider.data_location(self.ctx,", "as it can't be mocked out self.assertEqual(sample.plugin_content, b\"\") self.assertEqual( sample.display_name,", "downsample=4, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual( result[\"mondrian\"].keys(), [\"red\", \"blue\", \"yellow\"] )", "image, step=i, description=description, max_outputs=99, ) def create_multiplexer(self): multiplexer = event_multiplexer.EventMultiplexer()", "provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100, ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"])", "of `i`-by-`i` images image = tf.tile(image_1x1, [k, i, i, 1])", "= event_multiplexer.EventMultiplexer() multiplexer.AddRunsFromDirectory(self.logdir) multiplexer.Reload() return multiplexer def create_provider(self): multiplexer =", "i, i, 1]) image_summary.image( name, image, step=i, description=description, max_outputs=99, )", "test_zero(self): xs = \"abcdefg\" actual = data_provider._downsample(xs, k=0) self.assertEqual(actual, [])", "= base_provider.RunTagFilter( runs=[\"lebesgue\"], tags=[\"uniform\", \"bimodal\"], ) result = provider.read_tensors( self.ctx,", "tfrecords, modifying the data, and writing # them back out),", ") self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual( result[\"mondrian\"].keys(), [\"red\", \"blue\", \"yellow\"] ) sample", "owned by the scalars plugin. 
metadata = summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name =", "plugin_name=image_metadata.PLUGIN_NAME, downsample=4, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual( result[\"mondrian\"].keys(), [\"red\", \"blue\", \"yellow\"]", "runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\", \"green't\"] ), ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"])", "tf.compat.v1.Graph().as_default() as graph: writer = tf.compat.v1.summary.FileWriter(self.logdir) writer.add_graph(graph) writer.flush() provider =", "(239, 220, 111), \"bottom-right\"), ] for (name, color, description) in", "provider.list_plugins(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result, [ \"greetings\", \"marigraphs\", histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME,", "\"very smooth\") def test_list_tensors_filters(self): provider = self.create_provider() # Quick check", "step=i, metadata=metadata ) logdir = os.path.join(self.logdir, \"lebesgue\") with tf.summary.create_file_writer(logdir).as_default(): data", "base_provider.RunTagFilter( runs=[\"lebesgue\"], tags=[\"uniform\", \"bimodal\"], ) result = provider.read_tensors( self.ctx, experiment_id=\"unused\",", "result[\"mondrian\"][\"blue\"] self.assertLen(sample, 4) # downsampled from 10 last = sample[-1]", "self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"]) for run in result: for", "data = [ (\"very smooth\", (0.0, 0.25, 0.5, 0.75, 1.0),", "provider = self.create_provider() result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=None,", "2, step=2 * i, description=\"boxen\" ) scalar_summary.scalar(\"cube\", i ** 3,", "28, 38), \"top-right\"), (\"blue\", (1, 91, 158), \"bottom-left\"), (\"yellow\", (239,", 
"plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100, ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"]) for", "test_list_scalars_filters(self): provider = self.create_provider() result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME,", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "in zip(result[run][tag], tensor_events): self.assertEqual(datum.step, event.step) self.assertEqual(datum.wall_time, event.wall_time) self.assertEqual( datum.value, tensor_util.make_ndarray(event.tensor_proto).item(),", "(\"red\", (221, 28, 38), \"top-right\"), (\"blue\", (1, 91, 158), \"bottom-left\"),", "for wall time, as it can't be mocked out self.assertEqual(sample.plugin_content,", "self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) result = provider.read_scalars(", ") self.assertLen(result[\"lebesgue\"][\"uniform\"], 3) def test_list_blob_sequences(self): provider = self.create_provider() with self.subTest(\"finds", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "metadata as image_metadata from tensorboard.plugins.image import summary_v2 as image_summary from", "histogram_summary.histogram( name, tensor * i, step=i, description=description ) logdir =", "provider = self.create_provider() # Quick check only, as scalars and", "tf.summary.write( \"high_tide\", tensor=i, step=i, metadata=metadata ) # Summary with rank-1", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "i) # 1, .., 6, .., 2 # a `k`-sample", "of size 1 to a Python scalar\", ): provider.read_scalars( self.ctx,", "with self.subTest(\"reads all time series for a plugin\"): result =", "time, as it can't be mocked out self.assertEqual(sample.plugin_content, b\"\") self.assertEqual(sample.max_length,", "(bad!). 
metadata = summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name = \"greetings\" metadata.data_class = summary_pb2.DATA_CLASS_SCALAR", ") logdir = os.path.join(self.logdir, \"lebesgue\") with tf.summary.create_file_writer(logdir).as_default(): data = [", "for i in xrange(1, 11): histogram_summary.histogram( name, tensor * i,", "from 10 last = sample[-1] self.assertIsInstance(last, base_provider.BlobSequenceDatum) self.assertEqual(last.step, 10) self.assertLen(last.values,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "def test_data_location(self): provider = self.create_provider() result = provider.data_location(self.ctx, experiment_id=\"unused\") self.assertEqual(result,", "TODO(@wchargin): Verify that this always includes the most # recent", "ops self.assertEqual(sample.description, \"very smooth\") def test_list_tensors_filters(self): provider = self.create_provider() #", "[\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"][0], base_provider.BlobSequenceDatum, ) class DownsampleTest(tf.test.TestCase): \"\"\"Tests for the", "), ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"], base_provider.BlobSequenceTimeSeries, )", "metadata=metadata ) # Summary with rank-1 data of scalar data", "Rights Reserved. 
# # Licensed under the Apache License, Version", ") result = provider.read_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, downsample=3, ) self.assertLen(result[\"lebesgue\"][\"uniform\"],", "tf.sin(float(i)), step=i) scalar_summary.scalar( \"square\", tf.sign(tf.sin(float(i))), step=i ) # Summary with", "specific language governing permissions and # limitations under the License.", "result[\"polynomials\"][\"square\"] self.assertIsInstance(sample, base_provider.ScalarTimeSeries) self.assertEqual(sample.max_step, 18) # nothing to test for", "can't be mocked out self.assertEqual(sample.plugin_content, b\"\") self.assertEqual( sample.display_name, \"\" )", "sample sizes to # test `max_length` calculation. k = 6", "[\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"], base_provider.BlobSequenceTimeSeries, ) def test_read_blob_sequences_and_read_blob(self): provider = self.create_provider()", "and # limitations under the License. # ============================================================================== \"\"\"Unit tests", "result: for tag in result[run]: tensor_events = multiplexer.Tensors(run, tag) self.assertLen(result[run][tag],", "= list(range(10000)) actual = data_provider._downsample(xs, k=100) self.assertEqual(actual, sorted(actual)) def test_zero(self):", "specified by the interface. 
multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider(", "all time series for a plugin\"): result = provider.list_blob_sequences( self.ctx,", "\"boxen\") def test_list_scalars_filters(self): provider = self.create_provider() result = provider.list_scalars( self.ctx,", "# you may not use this file except in compliance", "test_read_scalars_but_not_rank_0(self): provider = self.create_provider() run_tag_filter = base_provider.RunTagFilter([\"waves\"], [\"bad\"]) # No", "import tensor_util import tensorflow.compat.v1 as tf1 import tensorflow.compat.v2 as tf", "import division from __future__ import print_function import os import six", "= summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name = \"marigraphs\" metadata.data_class = summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"high_tide\",", "i in xrange(1, 11): histogram_summary.histogram( name, tensor * i, step=i,", "( plugin_event_multiplexer as event_multiplexer, ) from tensorboard.compat.proto import summary_pb2 from", "provider = self.create_provider() result = provider.list_plugins(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result, [", "image_metadata from tensorboard.plugins.image import summary_v2 as image_summary from tensorboard.util import", "experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\", \"green't\"] ), ) self.assertItemsEqual(result.keys(),", "# a `k`-sample image summary of `i`-by-`i` images image =", "\"bimodal\"]) for run in result: for tag in result[run]: tensor_events", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "def test_inorder(self): xs = list(range(10000)) actual = data_provider._downsample(xs, k=100) self.assertEqual(actual,", "as tf1 import tensorflow.compat.v2 as tf tf1.enable_eager_execution() class MultiplexerDataProviderTest(tf.test.TestCase): def", "# nothing to test for wall 
time, as it can't", "test_inorder(self): xs = list(range(10000)) actual = data_provider._downsample(xs, k=100) self.assertEqual(actual, sorted(actual))", "6, .., 2 # a `k`-sample image summary of `i`-by-`i`", "rank-0 data but not owned by the scalars plugin. metadata", "in xrange(1, 11): # Use a non-monotonic sequence of sample", "import xrange # pylint: disable=redefined-builtin import numpy as np from", "{}) def test_read_scalars(self): multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer,", "= blobs[2] blue2 = blobs[3] red1 = provider.read_blob( self.ctx, blob_key=result[\"mondrian\"][\"red\"][-1].values[2].blob_key,", "under the Apache License, Version 2.0 (the \"License\"); # you", "os import six from six.moves import xrange # pylint: disable=redefined-builtin", "self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"]) sample = result[\"polynomials\"][\"square\"] self.assertIsInstance(sample, base_provider.ScalarTimeSeries) self.assertEqual(sample.max_step, 18)", "tensor_events): self.assertEqual(datum.step, event.step) self.assertEqual(datum.wall_time, event.wall_time) self.assertEqual( datum.value, tensor_util.make_ndarray(event.tensor_proto).item(), ) def", "test_deterministic(self): xs = \"abcdefg\" expected = data_provider._downsample(xs, k=4) for _", ") # Summary with rank-1 data of scalar data class", "summary_pb2 from tensorboard.data import provider as base_provider from tensorboard.plugins.graph import", "FakeMultiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, \"fake_logdir\" ) result = provider.list_runs(self.ctx,", "downsample=3, ) self.assertLen(result[\"lebesgue\"][\"uniform\"], 3) def test_list_blob_sequences(self): provider = self.create_provider() with", "xs) self.assertEqual(actual, expected) def test_inorder(self): xs = list(range(10000)) actual =", "v in last.values ] self.assertEqual(blobs[0], b\"10\") self.assertEqual(blobs[1], b\"10\") 
self.assertStartsWith(blobs[2], b\"\\x89PNG\")", "color, description) in data: image_1x1 = tf.constant([[[color]]], dtype=tf.uint8) for i", "self.subTest(\"finds all time series for a plugin\"): result = provider.list_blob_sequences(", "sample[-1] self.assertIsInstance(last, base_provider.BlobSequenceDatum) self.assertEqual(last.step, 10) self.assertLen(last.values, 2 + 2) blobs", "to a Python scalar\", ): provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=\"greetings\", run_tag_filter=run_tag_filter,", "tf.constant([distribution], dtype=tf.float64) for i in xrange(1, 11): histogram_summary.histogram( name, tensor", "smooth\") def test_list_tensors_filters(self): provider = self.create_provider() # Quick check only,", "2 + 2) blobs = [ provider.read_blob(self.ctx, blob_key=v.blob_key) for v", ") logdir = os.path.join(self.logdir, \"mondrian\") with tf.summary.create_file_writer(logdir).as_default(): data = [", "limitations under the License. # ============================================================================== \"\"\"Unit tests for `tensorboard.backend.event_processing.data_provider`.\"\"\"", "in xrange(10): scalar_summary.scalar(\"sine\", tf.sin(float(i)), step=i) scalar_summary.scalar( \"square\", tf.sign(tf.sin(float(i))), step=i )", "class FakeMultiplexer(object): def Runs(multiplexer): result = [\"second_2\", \"first\", \"no_time\", \"second_1\"]", "self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\", \"green't\"] ), )", "private helper function.\"\"\" def test_deterministic(self): xs = \"abcdefg\" expected =", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "self.assertItemsEqual( result, [ \"greetings\", \"marigraphs\", graph_metadata.PLUGIN_NAME, histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ],", "\"lebesgue\") with 
tf.summary.create_file_writer(logdir).as_default(): data = [ (\"very smooth\", (0.0, 0.25,", "expected) def test_underlong_ok(self): xs = list(\"abcdefg\") actual = data_provider._downsample(xs, k=10)", "xrange(1, 11): histogram_summary.histogram( name, tensor * i, step=i, description=description )", "test for wall time, as it can't be mocked out", "self.logdir ) run_tag_filter = base_provider.RunTagFilter( runs=[\"lebesgue\"], tags=[\"uniform\", \"bimodal\"], ) result", "scalars plugin. metadata = summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name = \"marigraphs\" metadata.data_class =", "self.assertEqual(sample.max_step, 18) # nothing to test for wall time, as", "self.assertLen(result[\"waves\"][\"sine\"], 3) def test_read_scalars_but_not_rank_0(self): provider = self.create_provider() run_tag_filter = base_provider.RunTagFilter([\"waves\"],", "run_tag_filter = base_provider.RunTagFilter( runs=[\"lebesgue\"], tags=[\"uniform\", \"bimodal\"], ) result = provider.read_tensors(", "** 3, step=3 * i) logdir = os.path.join(self.logdir, \"waves\") with", "= provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=None, ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"])", "b\"\") self.assertEqual(sample.max_length, 6 + 2) self.assertEqual(sample.description, \"bottom-left\") self.assertEqual(sample.display_name, \"\") with", "test_read_scalars(self): multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir )", "provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter(runs=[\"waves\", \"hugs\"]), ) self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(),", "self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter(runs=[\"waves\", \"hugs\"]), ) 
self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"sine\",", "self.ctx, experiment_id=\"unused\", plugin_name=\"greetings\", run_tag_filter=run_tag_filter, downsample=100, ) def test_list_tensors_all(self): provider =", "= FakeMultiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, \"fake_logdir\" ) result =", "includes the most # recent datum, as specified by the", "metadata.data_class = summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"bad\", tensor=[i, i], step=i, metadata=metadata )", "blobs = [ provider.read_blob(self.ctx, blob_key=v.blob_key) for v in last.values ]", "self.assertEqual(datum.step, event.step) self.assertEqual(datum.wall_time, event.wall_time) self.assertEqual( datum.value, tensor_util.make_ndarray(event.tensor_proto).item(), ) def test_read_scalars_downsamples(self):", "tensor_events = multiplexer.Tensors(run, tag) self.assertLen(result[run][tag], len(tensor_events)) for (datum, event) in", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "= summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"high_tide\", tensor=i, step=i, metadata=metadata ) # Summary", "Summary with rank-0 data but not owned by the scalars", "Authors. All Rights Reserved. # # Licensed under the Apache", "# limitations under the License. 
# ============================================================================== \"\"\"Unit tests for", "tensorboard import context from tensorboard.backend.event_processing import data_provider from tensorboard.backend.event_processing import", "self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"un\"], [\"likely\"]), ) self.assertEqual(result, {}) def test_read_scalars(self):", "sample = result[\"mondrian\"][\"blue\"] self.assertIsInstance(sample, base_provider.BlobSequenceTimeSeries) self.assertEqual(sample.max_step, 10) # nothing to", "Summary with rank-1 data of scalar data class (bad!). metadata", "Apache License, Version 2.0 (the \"License\"); # you may not", "either express or implied. # See the License for the", "= provider.read_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\", \"green't\"]", ") class DownsampleTest(tf.test.TestCase): \"\"\"Tests for the `_downsample` private helper function.\"\"\"", "self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( [\"lebesgue\"], [\"uniform\"] ), ) self.assertItemsEqual(result.keys(), [\"lebesgue\"])", "# ============================================================================== \"\"\"Unit tests for `tensorboard.backend.event_processing.data_provider`.\"\"\" from __future__ import absolute_import", "import summary_v2 as image_summary from tensorboard.util import tensor_util import tensorflow.compat.v1", "\"no_time\": None, \"second_1\": 2.0, } class FakeMultiplexer(object): def Runs(multiplexer): result", "for run in result: for tag in result[run]: tensor_events =", "xrange(1, 11): # Use a non-monotonic sequence of sample sizes", "def test_list_blob_sequences(self): provider = self.create_provider() with self.subTest(\"finds all time 
series", "\"marigraphs\", graph_metadata.PLUGIN_NAME, histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ], ) def test_list_runs(self): #", ") self.assertEqual(blue1, blue2) self.assertNotEqual(blue1, red1) with self.subTest(\"filters by run/tag\"): result", "[\"uniform\", \"bimodal\"]) for run in result: for tag in result[run]:", "time series for a plugin\"): result = provider.list_blob_sequences( self.ctx, experiment_id=\"unused\",", "in result: for tag in result[run]: tensor_events = multiplexer.Tensors(run, tag)", "ops self.assertEqual(sample.description, \"boxen\") def test_list_scalars_filters(self): provider = self.create_provider() result =", ".., 2 # a `k`-sample image summary of `i`-by-`i` images", "self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"][0], base_provider.BlobSequenceDatum, ) class DownsampleTest(tf.test.TestCase):", "\"iridescence\"], ) result = provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100,", "def test_list_scalars_filters(self): provider = self.create_provider() result = provider.list_scalars( self.ctx, experiment_id=\"unused\",", "experiment_id=\"unused\") self.assertItemsEqual( result, [ \"greetings\", \"marigraphs\", graph_metadata.PLUGIN_NAME, histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME,", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "\"high_tide\", tensor=i, step=i, metadata=metadata ) # Summary with rank-1 data", "data but not owned by the scalars plugin. metadata =", "start_times = { \"second_2\": 2.0, \"first\": 1.5, \"no_time\": None, \"second_1\":", "scalar_metadata.PLUGIN_NAME, ], ) def test_list_runs(self): # We can't control the", "filtering implementation. 
result = provider.list_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( [\"lebesgue\"],", "`i`-by-`i` images image = tf.tile(image_1x1, [k, i, i, 1]) image_summary.image(", "multiplexer, self.logdir ) run_tag_filter = base_provider.RunTagFilter( runs=[\"waves\", \"polynomials\", \"unicorns\"], tags=[\"sine\",", "sample.display_name, \"\" ) # not written by V2 summary ops", "= provider.read_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100, ) self.assertItemsEqual(result.keys(), [\"lebesgue\"])", "\"yellow\"] ) sample = result[\"mondrian\"][\"blue\"] self.assertLen(sample, 4) # downsampled from", "(without # manually reading the tfrecords, modifying the data, and", "description=description, max_outputs=99, ) def create_multiplexer(self): multiplexer = event_multiplexer.EventMultiplexer() multiplexer.AddRunsFromDirectory(self.logdir) multiplexer.Reload()", "provider = data_provider.MultiplexerDataProvider( multiplexer, \"fake_logdir\" ) result = provider.list_runs(self.ctx, experiment_id=\"unused\")", "self.assertIsInstance(sample, base_provider.ScalarTimeSeries) self.assertEqual(sample.max_step, 18) # nothing to test for wall", "logdir = os.path.join(self.logdir, \"polynomials\") with tf.summary.create_file_writer(logdir).as_default(): for i in xrange(10):", "from six.moves import xrange # pylint: disable=redefined-builtin import numpy as", "= data_provider._downsample(xs, k=10) expected = list(\"abcdefg\") self.assertIsNot(actual, xs) self.assertEqual(actual, expected)", "`max_length` calculation. 
k = 6 - abs(6 - i) #", "for tag in result[run]: tensor_events = multiplexer.Tensors(run, tag) self.assertLen(result[run][tag], len(tensor_events))", "writer.flush() provider = self.create_provider() result = provider.list_plugins(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result,", "plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=None, ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"]) sample =", "experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual( result[\"mondrian\"].keys(), [\"red\", \"blue\", \"yellow\"]", "[\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter(", "event_multiplexer, ) from tensorboard.compat.proto import summary_pb2 from tensorboard.data import provider", "10 last = sample[-1] self.assertIsInstance(last, base_provider.BlobSequenceDatum) self.assertEqual(last.step, 10) self.assertLen(last.values, 2", "multiplexer = event_multiplexer.EventMultiplexer() multiplexer.AddRunsFromDirectory(self.logdir) multiplexer.Reload() return multiplexer def create_provider(self): multiplexer", "test_list_plugins_with_no_graph(self): provider = self.create_provider() result = provider.list_plugins(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result,", "__future__ import division from __future__ import print_function import os import", "data_provider._downsample(xs, k=4) self.assertEqual(actual, expected) def test_underlong_ok(self): xs = list(\"abcdefg\") actual", "multiplexer def create_provider(self): multiplexer = self.create_multiplexer() return data_provider.MultiplexerDataProvider(multiplexer, self.logdir) def", "provider.read_blob(self.ctx, blob_key=v.blob_key) for v in 
last.values ] self.assertEqual(blobs[0], b\"10\") self.assertEqual(blobs[1],", "with tf.compat.v1.Graph().as_default() as graph: writer = tf.compat.v1.summary.FileWriter(self.logdir) writer.add_graph(graph) writer.flush() provider", "= base_provider.RunTagFilter([\"waves\"], [\"bad\"]) # No explicit checks yet. with six.assertRaisesRegex(", "= provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"waves\"], [\"square\"]), ) self.assertItemsEqual(result.keys(), [\"waves\"])", "= self.create_provider() result = provider.list_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=None, )", "use this file except in compliance with the License. #", "actual = data_provider._downsample(xs, k=100) self.assertEqual(actual, sorted(actual)) def test_zero(self): xs =", "[ \"greetings\", \"marigraphs\", histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ], ) def test_list_plugins_with_graph(self):", "\"bottom-right\"), ] for (name, color, description) in data: image_1x1 =", "\"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"]) sample = result[\"polynomials\"][\"square\"]", "red1) with self.subTest(\"filters by run/tag\"): result = provider.read_blob_sequences( self.ctx, experiment_id=\"unused\",", "\"bimodal\"), ] for (description, distribution, name) in data: tensor =", "The TensorFlow Authors. All Rights Reserved. 
# # Licensed under", "\"unicorns\"], tags=[\"sine\", \"square\", \"cube\", \"iridescence\"], ) result = provider.read_scalars( self.ctx,", "a plugin\"): result = provider.read_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, downsample=4, )", "provider.list_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( [\"lebesgue\"], [\"uniform\"] ), ) self.assertItemsEqual(result.keys(),", "division from __future__ import print_function import os import six from", "tensorboard.backend.event_processing import data_provider from tensorboard.backend.event_processing import ( plugin_event_multiplexer as event_multiplexer,", "test `max_length` calculation. k = 6 - abs(6 - i)", "histogram_metadata from tensorboard.plugins.histogram import summary_v2 as histogram_summary from tensorboard.plugins.scalar import", "tf tf1.enable_eager_execution() class MultiplexerDataProviderTest(tf.test.TestCase): def setUp(self): super(MultiplexerDataProviderTest, self).setUp() self.logdir =", "metadata = summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name = \"marigraphs\" metadata.data_class = summary_pb2.DATA_CLASS_SCALAR tf.summary.write(", "result[\"mondrian\"][\"yellow\"], base_provider.BlobSequenceTimeSeries, ) def test_read_blob_sequences_and_read_blob(self): provider = self.create_provider() with self.subTest(\"reads", "\"bad\", tensor=[i, i], step=i, metadata=metadata ) logdir = os.path.join(self.logdir, \"lebesgue\")", "self.logdir ) run_tag_filter = base_provider.RunTagFilter( runs=[\"waves\", \"polynomials\", \"unicorns\"], tags=[\"sine\", \"square\",", "# test `max_length` calculation. 
k = 6 - abs(6 -", "six from six.moves import xrange # pylint: disable=redefined-builtin import numpy", "from tensorboard.plugins.scalar import metadata as scalar_metadata from tensorboard.plugins.scalar import summary_v2", "blob_key=result[\"mondrian\"][\"red\"][-1].values[2].blob_key, ) self.assertEqual(blue1, blue2) self.assertNotEqual(blue1, red1) with self.subTest(\"filters by run/tag\"):", "in compliance with the License. # You may obtain a", "result = start_times[run] if result is None: raise ValueError(\"No event", "software # distributed under the License is distributed on an", "provider.read_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\", \"green't\"] ),", "super(MultiplexerDataProviderTest, self).setUp() self.logdir = self.get_temp_dir() self.ctx = context.RequestContext() logdir =", "summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name = \"greetings\" metadata.data_class = summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"bad\", tensor=[i,", "============================================================================== \"\"\"Unit tests for `tensorboard.backend.event_processing.data_provider`.\"\"\" from __future__ import absolute_import from", "result = provider.read_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, downsample=3, ) self.assertLen(result[\"lebesgue\"][\"uniform\"], 3)", "tf.constant([[[color]]], dtype=tf.uint8) for i in xrange(1, 11): # Use a", "b\"\\x89PNG\") blue1 = blobs[2] blue2 = blobs[3] red1 = provider.read_blob(", "metadata = summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name = \"greetings\" metadata.data_class = summary_pb2.DATA_CLASS_SCALAR tf.summary.write(", ") from tensorboard.compat.proto import summary_pb2 from tensorboard.data import provider as", "tensorboard.plugins.histogram import summary_v2 as histogram_summary from 
tensorboard.plugins.scalar import metadata as", "\"marigraphs\", histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ], ) def test_list_plugins_with_graph(self): with tf.compat.v1.Graph().as_default()", "base_provider.TensorTimeSeries) self.assertEqual(sample.max_step, 10) # nothing to test for wall time,", "of scalar data class (bad!). metadata = summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name =", "self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100, ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(),", "run_tag_filter=base_provider.RunTagFilter([\"un\"], [\"likely\"]), ) self.assertEqual(result, {}) def test_read_scalars(self): multiplexer = self.create_multiplexer()", "modifying the data, and writing # them back out), so", "= summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name = \"greetings\" metadata.data_class = summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"bad\",", "[\"square\", \"sine\"]) sample = result[\"polynomials\"][\"square\"] self.assertIsInstance(sample, base_provider.ScalarTimeSeries) self.assertEqual(sample.max_step, 18) #", "with tf.summary.create_file_writer(logdir).as_default(): for i in xrange(10): scalar_summary.scalar( \"square\", i **", "} class FakeMultiplexer(object): def Runs(multiplexer): result = [\"second_2\", \"first\", \"no_time\",", "__future__ import print_function import os import six from six.moves import", "\"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"]) for run in", "from tensorboard.plugins.image import summary_v2 as image_summary from tensorboard.util import tensor_util", "the same underlying # filtering implementation. 
result = provider.list_tensors( self.ctx,", "out self.assertEqual(sample.plugin_content, b\"\") self.assertEqual(sample.max_length, 6 + 2) self.assertEqual(sample.description, \"bottom-left\") self.assertEqual(sample.display_name,", "run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\", \"green't\"] ), ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(),", "provider = self.create_provider() result = provider.data_location(self.ctx, experiment_id=\"unused\") self.assertEqual(result, self.logdir) def", "size 1 to a Python scalar\", ): provider.read_scalars( self.ctx, experiment_id=\"unused\",", "plugin_name=scalar_metadata.PLUGIN_NAME, downsample=3, ) self.assertLen(result[\"waves\"][\"sine\"], 3) def test_read_scalars_but_not_rank_0(self): provider = self.create_provider()", "\"cube\", \"iridescence\"], ) result = provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter,", "import tensorflow.compat.v1 as tf1 import tensorflow.compat.v2 as tf tf1.enable_eager_execution() class", "\"can only convert an array of size 1 to a", "scalar_metadata from tensorboard.plugins.scalar import summary_v2 as scalar_summary from tensorboard.plugins.image import", "tensorboard.plugins.image import summary_v2 as image_summary from tensorboard.util import tensor_util import", "use the same underlying # filtering implementation. result = provider.list_tensors(", "plugin_name=histogram_metadata.PLUGIN_NAME, downsample=3, ) self.assertLen(result[\"lebesgue\"][\"uniform\"], 3) def test_list_blob_sequences(self): provider = self.create_provider()", "with the License. 
# You may obtain a copy of", "import context from tensorboard.backend.event_processing import data_provider from tensorboard.backend.event_processing import (", "), ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result", "= list(\"abcdefg\") actual = data_provider._downsample(xs, k=10) expected = list(\"abcdefg\") self.assertIsNot(actual,", ") self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"][0], base_provider.BlobSequenceDatum, ) class", "data of scalar data class (bad!). metadata = summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name", "downsample=100, ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\",", "step=i ) # Summary with rank-0 data but not owned", ") self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"]) sample = result[\"lebesgue\"][\"uniform\"] self.assertIsInstance(sample,", "print_function import os import six from six.moves import xrange #", "def test_read_scalars_but_not_rank_0(self): provider = self.create_provider() run_tag_filter = base_provider.RunTagFilter([\"waves\"], [\"bad\"]) #", "by the scalars plugin. metadata = summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name = \"marigraphs\"", "ValueError(\"No event timestep could be found\") else: return result multiplexer", "tf.summary.create_file_writer(logdir).as_default(): for i in xrange(10): scalar_summary.scalar( \"square\", i ** 2,", "express or implied. # See the License for the specific", "except in compliance with the License. 
# You may obtain", "\"\") with self.subTest(\"filters by run/tag\"): result = provider.list_blob_sequences( self.ctx, experiment_id=\"unused\",", "\"uniform\"), (\"very smoothn't\", (0.0, 0.01, 0.99, 1.0), \"bimodal\"), ] for", "non-monotonic sequence of sample sizes to # test `max_length` calculation.", "= \"abcdefg\" expected = data_provider._downsample(xs, k=4) for _ in range(100):", "= data_provider._downsample(xs, k=0) self.assertEqual(actual, []) if __name__ == \"__main__\": tf.test.main()", "k=100) self.assertEqual(actual, sorted(actual)) def test_zero(self): xs = \"abcdefg\" actual =", "result = provider.list_plugins(self.ctx, experiment_id=\"unused\") self.assertItemsEqual( result, [ \"greetings\", \"marigraphs\", histogram_metadata.PLUGIN_NAME,", "def test_read_tensors(self): multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir", "dtype=tf.uint8) for i in xrange(1, 11): # Use a non-monotonic", "summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name = \"marigraphs\" metadata.data_class = summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"high_tide\", tensor=i,", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"un\"], [\"likely\"]), ) self.assertEqual(result, {}) def test_read_scalars(self): multiplexer", "i, step=i, description=description ) logdir = os.path.join(self.logdir, \"mondrian\") with tf.summary.create_file_writer(logdir).as_default():", "`k`-sample image summary of `i`-by-`i` images image = tf.tile(image_1x1, [k,", "result = provider.list_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( [\"lebesgue\"], [\"uniform\"] ),", "len(tensor_events)) for (datum, event) in zip(result[run][tag], tensor_events): self.assertEqual(datum.step, event.step) 
self.assertEqual(datum.wall_time,", "as image_metadata from tensorboard.plugins.image import summary_v2 as image_summary from tensorboard.util", "CONDITIONS OF ANY KIND, either express or implied. # See", "data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) result = provider.read_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME,", "multiplexer.Reload() return multiplexer def create_provider(self): multiplexer = self.create_multiplexer() return data_provider.MultiplexerDataProvider(multiplexer,", "fake multiplexer instead. start_times = { \"second_2\": 2.0, \"first\": 1.5,", "self.assertEqual( datum.value, tensor_util.make_ndarray(event.tensor_proto).item(), ) def test_read_scalars_downsamples(self): # TODO(@wchargin): Verify that", "def setUp(self): super(MultiplexerDataProviderTest, self).setUp() self.logdir = self.get_temp_dir() self.ctx = context.RequestContext()", "[\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"]) for run in result: for tag", "graph_metadata from tensorboard.plugins.histogram import metadata as histogram_metadata from tensorboard.plugins.histogram import", "(\"blue\", (1, 91, 158), \"bottom-left\"), (\"yellow\", (239, 220, 111), \"bottom-right\"),", "test_list_tensors_all(self): provider = self.create_provider() result = provider.list_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME,", "tensorboard.backend.event_processing import ( plugin_event_multiplexer as event_multiplexer, ) from tensorboard.compat.proto import", "(name, color, description) in data: image_1x1 = tf.constant([[[color]]], dtype=tf.uint8) for", "self.assertIsInstance(sample, base_provider.BlobSequenceTimeSeries) self.assertEqual(sample.max_step, 10) # nothing to test for wall", "\"blue\", \"yellow\"] ) sample = result[\"mondrian\"][\"blue\"] self.assertLen(sample, 4) # downsampled", "\"polynomials\") with 
tf.summary.create_file_writer(logdir).as_default(): for i in xrange(10): scalar_summary.scalar( \"square\", i", "experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, downsample=4, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual( result[\"mondrian\"].keys(), [\"red\", \"blue\",", "context from tensorboard.backend.event_processing import data_provider from tensorboard.backend.event_processing import ( plugin_event_multiplexer", "recent datum, as specified by the interface. multiplexer = self.create_multiplexer()", "step=2 * i, description=\"boxen\" ) scalar_summary.scalar(\"cube\", i ** 3, step=3", "+ 2) self.assertEqual(sample.description, \"bottom-left\") self.assertEqual(sample.display_name, \"\") with self.subTest(\"filters by run/tag\"):", "self.assertItemsEqual(result, start_times) return result def FirstEventTimestamp(multiplexer, run): self.assertIn(run, start_times) result", "[\"bad\"]) # No explicit checks yet. with six.assertRaisesRegex( self, ValueError,", "multiplexer, self.logdir ) result = provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, downsample=3,", "it can't be mocked out self.assertEqual(sample.plugin_content, b\"\") self.assertEqual( sample.display_name, \"\"", "as np from tensorboard import context from tensorboard.backend.event_processing import data_provider", "for i in xrange(10): scalar_summary.scalar( \"square\", i ** 2, step=2", "image_1x1 = tf.constant([[[color]]], dtype=tf.uint8) for i in xrange(1, 11): #", "= self.get_temp_dir() self.ctx = context.RequestContext() logdir = os.path.join(self.logdir, \"polynomials\") with", "38), \"top-right\"), (\"blue\", (1, 91, 158), \"bottom-left\"), (\"yellow\", (239, 220,", "plugin_event_multiplexer as event_multiplexer, ) from tensorboard.compat.proto import summary_pb2 from tensorboard.data", "base_provider.BlobSequenceTimeSeries, ) def test_read_blob_sequences_and_read_blob(self): provider = 
self.create_provider() with self.subTest(\"reads all", "= provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, downsample=3, ) self.assertLen(result[\"waves\"][\"sine\"], 3) def", "result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"waves\"], [\"square\"]), ) self.assertItemsEqual(result.keys(),", "= data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) result = provider.read_scalars( self.ctx, experiment_id=\"unused\",", "run_tag_filter=run_tag_filter, downsample=100, ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(),", "smoothn't\", (0.0, 0.01, 0.99, 1.0), \"bimodal\"), ] for (description, distribution,", "b\"\") self.assertEqual( sample.display_name, \"\" ) # not written by V2", "and writing # them back out), so we provide a", "self.assertIsInstance(last, base_provider.BlobSequenceDatum) self.assertEqual(last.step, 10) self.assertLen(last.values, 2 + 2) blobs =", "downsample=1, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"][0], base_provider.BlobSequenceDatum, )", "= result[\"polynomials\"][\"square\"] self.assertIsInstance(sample, base_provider.ScalarTimeSeries) self.assertEqual(sample.max_step, 18) # nothing to test", "datum.value, tensor_util.make_ndarray(event.tensor_proto).item(), ) def test_read_scalars_downsamples(self): # TODO(@wchargin): Verify that this", "metadata=metadata ) logdir = os.path.join(self.logdir, \"lebesgue\") with tf.summary.create_file_writer(logdir).as_default(): data =", "self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result = 
provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME,", "data_provider from tensorboard.backend.event_processing import ( plugin_event_multiplexer as event_multiplexer, ) from", "# downsampled from 10 last = sample[-1] self.assertIsInstance(last, base_provider.BlobSequenceDatum) self.assertEqual(last.step,", "result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( tags=[\"square\", \"quartic\"] ),", "dtype=tf.float64) for i in xrange(1, 11): histogram_summary.histogram( name, tensor *", "from tensorboard.plugins.scalar import summary_v2 as scalar_summary from tensorboard.plugins.image import metadata", "array of size 1 to a Python scalar\", ): provider.read_scalars(", "Use a non-monotonic sequence of sample sizes to # test", "blobs[2] blue2 = blobs[3] red1 = provider.read_blob( self.ctx, blob_key=result[\"mondrian\"][\"red\"][-1].values[2].blob_key, )", "in result[run]: tensor_events = multiplexer.Tensors(run, tag) self.assertLen(result[run][tag], len(tensor_events)) for (datum,", "the most # recent datum, as specified by the interface.", "# Copyright 2019 The TensorFlow Authors. All Rights Reserved. 
#", "self.assertEqual(actual, expected) def test_inorder(self): xs = list(range(10000)) actual = data_provider._downsample(xs,", "metadata.data_class = summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"high_tide\", tensor=i, step=i, metadata=metadata ) #", "datum.numpy, tensor_util.make_ndarray(event.tensor_proto), ) def test_read_tensors_downsamples(self): multiplexer = self.create_multiplexer() provider =", "distribution, name) in data: tensor = tf.constant([distribution], dtype=tf.float64) for i", "return result multiplexer = FakeMultiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, \"fake_logdir\"", "\"quartic\"] ), ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"])", "reading the tfrecords, modifying the data, and writing # them", "self.assertLen(result[run][tag], len(tensor_events)) for (datum, event) in zip(result[run][tag], tensor_events): self.assertEqual(datum.step, event.step)", "run_tag_filter=base_provider.RunTagFilter( runs=[\"mondrian\", \"picasso\"], tags=[\"yellow\", \"green't\"] ), downsample=1, ) self.assertItemsEqual(result.keys(), [\"mondrian\"])", "{ \"second_2\": 2.0, \"first\": 1.5, \"no_time\": None, \"second_1\": 2.0, }", "(\"yellow\", (239, 220, 111), \"bottom-right\"), ] for (name, color, description)", "158), \"bottom-left\"), (\"yellow\", (239, 220, 111), \"bottom-right\"), ] for (name,", "self.assertItemsEqual( result, [ \"greetings\", \"marigraphs\", histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ], )", "self.assertEqual(sample.description, \"bottom-left\") self.assertEqual(sample.display_name, \"\") with self.subTest(\"filters by run/tag\"): result =", "License. 
# ============================================================================== \"\"\"Unit tests for `tensorboard.backend.event_processing.data_provider`.\"\"\" from __future__ import", "datum, as specified by the interface. multiplexer = self.create_multiplexer() provider", "# not written by V2 summary ops self.assertEqual(sample.description, \"very smooth\")", "self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, downsample=3, ) self.assertLen(result[\"lebesgue\"][\"uniform\"], 3) def test_list_blob_sequences(self): provider", "list(\"abcdefg\") actual = data_provider._downsample(xs, k=10) expected = list(\"abcdefg\") self.assertIsNot(actual, xs)", "(\"very smoothn't\", (0.0, 0.01, 0.99, 1.0), \"bimodal\"), ] for (description,", "wall time, as it can't be mocked out self.assertEqual(sample.plugin_content, b\"\")", "[\"sine\", \"square\"]) result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"un\"], [\"likely\"]),", ") # Summary with rank-0 data but not owned by", "run_tag_filter=None, ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\",", "import tensorflow.compat.v2 as tf tf1.enable_eager_execution() class MultiplexerDataProviderTest(tf.test.TestCase): def setUp(self): super(MultiplexerDataProviderTest,", "with self.subTest(\"filters by run/tag\"): result = provider.list_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME,", "= [\"second_2\", \"first\", \"no_time\", \"second_1\"] self.assertItemsEqual(result, start_times) return result def", "import metadata as scalar_metadata from tensorboard.plugins.scalar import summary_v2 as scalar_summary", ") def test_read_blob_sequences_and_read_blob(self): provider = self.create_provider() with 
self.subTest(\"reads all time", "\"blue\", \"yellow\"] ) sample = result[\"mondrian\"][\"blue\"] self.assertIsInstance(sample, base_provider.BlobSequenceTimeSeries) self.assertEqual(sample.max_step, 10)", "xs = \"abcdefg\" expected = data_provider._downsample(xs, k=4) for _ in", "= data_provider._downsample(xs, k=4) self.assertEqual(actual, expected) def test_underlong_ok(self): xs = list(\"abcdefg\")", "self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance( result[\"mondrian\"][\"yellow\"], base_provider.BlobSequenceTimeSeries, ) def test_read_blob_sequences_and_read_blob(self):", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "tensorboard.plugins.scalar import summary_v2 as scalar_summary from tensorboard.plugins.image import metadata as", "mocked out self.assertEqual(sample.plugin_content, b\"\") self.assertEqual(sample.max_length, 6 + 2) self.assertEqual(sample.description, \"bottom-left\")", "self.ctx = context.RequestContext() logdir = os.path.join(self.logdir, \"polynomials\") with tf.summary.create_file_writer(logdir).as_default(): for", "event.wall_time) np.testing.assert_equal( datum.numpy, tensor_util.make_ndarray(event.tensor_proto), ) def test_read_tensors_downsamples(self): multiplexer = self.create_multiplexer()", "tf.summary.create_file_writer(logdir).as_default(): for i in xrange(10): scalar_summary.scalar(\"sine\", tf.sin(float(i)), step=i) scalar_summary.scalar( \"square\",", "provider.read_blob( self.ctx, blob_key=result[\"mondrian\"][\"red\"][-1].values[2].blob_key, ) self.assertEqual(blue1, blue2) self.assertNotEqual(blue1, red1) with self.subTest(\"filters", "# No explicit checks yet. 
with six.assertRaisesRegex( self, ValueError, \"can", "plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"un\"], [\"likely\"]), ) self.assertEqual(result, {}) def test_read_scalars(self): multiplexer =", "provider = self.create_provider() run_tag_filter = base_provider.RunTagFilter([\"waves\"], [\"bad\"]) # No explicit", "all time series for a plugin\"): result = provider.read_blob_sequences( self.ctx,", "actual = data_provider._downsample(xs, k=4) self.assertEqual(actual, expected) def test_underlong_ok(self): xs =", "tensorboard.data import provider as base_provider from tensorboard.plugins.graph import metadata as", "from tensorboard import context from tensorboard.backend.event_processing import data_provider from tensorboard.backend.event_processing", "= \"greetings\" metadata.data_class = summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"bad\", tensor=[i, i], step=i,", "logdir = os.path.join(self.logdir, \"waves\") with tf.summary.create_file_writer(logdir).as_default(): for i in xrange(10):", "* i, step=i, description=description ) logdir = os.path.join(self.logdir, \"mondrian\") with", "V2 summary ops self.assertEqual(sample.description, \"boxen\") def test_list_scalars_filters(self): provider = self.create_provider()", "data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) result = provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME,", "as graph_metadata from tensorboard.plugins.histogram import metadata as histogram_metadata from tensorboard.plugins.histogram", "\"greetings\", \"marigraphs\", graph_metadata.PLUGIN_NAME, histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ], ) def test_list_runs(self):", "self.create_provider() with self.subTest(\"finds all time series for a plugin\"): result", "provider as base_provider from tensorboard.plugins.graph import metadata as graph_metadata from", "): provider.read_scalars( self.ctx, 
experiment_id=\"unused\", plugin_name=\"greetings\", run_tag_filter=run_tag_filter, downsample=100, ) def test_list_tensors_all(self):", "tensorboard.util import tensor_util import tensorflow.compat.v1 as tf1 import tensorflow.compat.v2 as", "scalar_summary from tensorboard.plugins.image import metadata as image_metadata from tensorboard.plugins.image import", "Version 2.0 (the \"License\"); # you may not use this", "abs(6 - i) # 1, .., 6, .., 2 #", "self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"waves\"], [\"square\"]), ) self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"])", "sample = result[\"lebesgue\"][\"uniform\"] self.assertIsInstance(sample, base_provider.TensorTimeSeries) self.assertEqual(sample.max_step, 10) # nothing to", "sorted(actual)) def test_zero(self): xs = \"abcdefg\" actual = data_provider._downsample(xs, k=0)", "] self.assertEqual(blobs[0], b\"10\") self.assertEqual(blobs[1], b\"10\") self.assertStartsWith(blobs[2], b\"\\x89PNG\") self.assertStartsWith(blobs[3], b\"\\x89PNG\") blue1", "__future__ import absolute_import from __future__ import division from __future__ import", "result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=None, ) self.assertItemsEqual(result.keys(), [\"polynomials\",", "self.create_provider() result = provider.list_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=None, ) self.assertItemsEqual(result.keys(),", "= self.create_provider() result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=None, )", "result def FirstEventTimestamp(multiplexer, run): self.assertIn(run, start_times) result = start_times[run] if", "import summary_v2 as histogram_summary from tensorboard.plugins.scalar import metadata as 
scalar_metadata", "in data: tensor = tf.constant([distribution], dtype=tf.float64) for i in xrange(1,", "by applicable law or agreed to in writing, software #", "np from tensorboard import context from tensorboard.backend.event_processing import data_provider from", "the tfrecords, modifying the data, and writing # them back", "= data_provider.MultiplexerDataProvider( multiplexer, \"fake_logdir\" ) result = provider.list_runs(self.ctx, experiment_id=\"unused\") self.assertItemsEqual(", "tag) self.assertLen(result[run][tag], len(tensor_events)) for (datum, event) in zip(result[run][tag], tensor_events): self.assertEqual(datum.step,", "# recent datum, as specified by the interface. multiplexer =", "sequence of sample sizes to # test `max_length` calculation. k", "provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=\"greetings\", run_tag_filter=run_tag_filter, downsample=100, ) def test_list_tensors_all(self): provider", "self.assertItemsEqual(result[\"waves\"].keys(), [\"sine\", \"square\"]) result = provider.list_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"un\"],", "step=i, metadata=metadata ) # Summary with rank-1 data of scalar", "for a plugin\"): result = provider.list_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, )", "self.subTest(\"filters by run/tag\"): result = provider.list_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter(", "run_name=run, start_time=start_time ) for (run, start_time) in six.iteritems(start_times) ], )", "result = provider.read_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100, ) self.assertItemsEqual(result.keys(),", "self.create_provider() result = provider.list_plugins(self.ctx, experiment_id=\"unused\") 
self.assertItemsEqual( result, [ \"greetings\", \"marigraphs\",", ") run_tag_filter = base_provider.RunTagFilter( runs=[\"waves\", \"polynomials\", \"unicorns\"], tags=[\"sine\", \"square\", \"cube\",", "blob_key=v.blob_key) for v in last.values ] self.assertEqual(blobs[0], b\"10\") self.assertEqual(blobs[1], b\"10\")", "self.assertEqual(sample.max_step, 10) # nothing to test for wall time, as", "with six.assertRaisesRegex( self, ValueError, \"can only convert an array of", "plugin_name=\"greetings\", run_tag_filter=run_tag_filter, downsample=100, ) def test_list_tensors_all(self): provider = self.create_provider() result", "test_list_runs(self): # We can't control the timestamps of events written", "runs=[\"lebesgue\"], tags=[\"uniform\", \"bimodal\"], ) result = provider.read_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME,", "- abs(6 - i) # 1, .., 6, .., 2", "provider.read_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100, ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(),", "provider.data_location(self.ctx, experiment_id=\"unused\") self.assertEqual(result, self.logdir) def test_list_plugins_with_no_graph(self): provider = self.create_provider() result", "self.create_provider() run_tag_filter = base_provider.RunTagFilter([\"waves\"], [\"bad\"]) # No explicit checks yet.", "def create_provider(self): multiplexer = self.create_multiplexer() return data_provider.MultiplexerDataProvider(multiplexer, self.logdir) def test_data_location(self):", "xs = list(range(10000)) actual = data_provider._downsample(xs, k=100) self.assertEqual(actual, sorted(actual)) def", "(221, 28, 38), \"top-right\"), (\"blue\", (1, 91, 158), \"bottom-left\"), (\"yellow\",", "to test for wall time, as it can't be mocked", "1.5, \"no_time\": None, \"second_1\": 2.0, } class FakeMultiplexer(object): def 
Runs(multiplexer):", "import metadata as graph_metadata from tensorboard.plugins.histogram import metadata as histogram_metadata", "scalar_summary.scalar( \"square\", tf.sign(tf.sin(float(i))), step=i ) # Summary with rank-0 data", "\"cube\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\", \"sine\"]) for run in result: for tag", "applicable law or agreed to in writing, software # distributed", "as it can't be mocked out self.assertEqual(sample.plugin_content, b\"\") self.assertEqual(sample.max_length, 6", "tensor_events): self.assertEqual(datum.step, event.step) self.assertEqual(datum.wall_time, event.wall_time) np.testing.assert_equal( datum.numpy, tensor_util.make_ndarray(event.tensor_proto), ) def", ") result = provider.read_scalars( self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, downsample=3, ) self.assertLen(result[\"waves\"][\"sine\"],", "= data_provider._downsample(xs, k=4) for _ in range(100): actual = data_provider._downsample(xs,", "= self.create_provider() with self.subTest(\"finds all time series for a plugin\"):", "[\"mondrian\"]) self.assertItemsEqual( result[\"mondrian\"].keys(), [\"red\", \"blue\", \"yellow\"] ) sample = result[\"mondrian\"][\"blue\"]", "from tensorboard.plugins.histogram import metadata as histogram_metadata from tensorboard.plugins.histogram import summary_v2", "sizes to # test `max_length` calculation. 
k = 6 -", "result[\"lebesgue\"][\"uniform\"] self.assertIsInstance(sample, base_provider.TensorTimeSeries) self.assertEqual(sample.max_step, 10) # nothing to test for", "\"top-right\"), (\"blue\", (1, 91, 158), \"bottom-left\"), (\"yellow\", (239, 220, 111),", "# Quick check only, as scalars and tensors use the", "\"picasso\"], tags=[\"yellow\", \"green't\"] ), ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual(result[\"mondrian\"].keys(), [\"yellow\"]) self.assertIsInstance(", "run_tag_filter=None, ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"]) sample = result[\"lebesgue\"][\"uniform\"]", "start_time=start_time ) for (run, start_time) in six.iteritems(start_times) ], ) def", "in data: image_1x1 = tf.constant([[[color]]], dtype=tf.uint8) for i in xrange(1,", "self.logdir) def test_list_plugins_with_no_graph(self): provider = self.create_provider() result = provider.list_plugins(self.ctx, experiment_id=\"unused\")", "def test_read_scalars(self): multiplexer = self.create_multiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir", "plugin. 
metadata = summary_pb2.SummaryMetadata() metadata.plugin_data.plugin_name = \"marigraphs\" metadata.data_class = summary_pb2.DATA_CLASS_SCALAR", "# You may obtain a copy of the License at", "tensor=i, step=i, metadata=metadata ) # Summary with rank-1 data of", "], ) def test_list_runs(self): # We can't control the timestamps", "self.ctx, experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=None, ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\",", "plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=run_tag_filter, downsample=100, ) self.assertItemsEqual(result.keys(), [\"polynomials\", \"waves\"]) self.assertItemsEqual(result[\"polynomials\"].keys(), [\"square\", \"cube\"])", "ValueError, \"can only convert an array of size 1 to", "for i in xrange(1, 11): # Use a non-monotonic sequence", "import os import six from six.moves import xrange # pylint:", "as event_multiplexer, ) from tensorboard.compat.proto import summary_pb2 from tensorboard.data import", "with self.subTest(\"filters by run/tag\"): result = provider.read_blob_sequences( self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME,", "can't control the timestamps of events written to disk (without", "experiment_id=\"unused\", plugin_name=scalar_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter([\"waves\"], [\"square\"]), ) self.assertItemsEqual(result.keys(), [\"waves\"]) self.assertItemsEqual(result[\"waves\"].keys(), [\"square\"]) result", "experiment_id=\"unused\") self.assertItemsEqual( result, [ base_provider.Run( run_id=run, run_name=run, start_time=start_time ) for", "implementation. 
result = provider.list_tensors( self.ctx, experiment_id=\"unused\", plugin_name=histogram_metadata.PLUGIN_NAME, run_tag_filter=base_provider.RunTagFilter( [\"lebesgue\"], [\"uniform\"]", "self.ctx, experiment_id=\"unused\", plugin_name=image_metadata.PLUGIN_NAME, ) self.assertItemsEqual(result.keys(), [\"mondrian\"]) self.assertItemsEqual( result[\"mondrian\"].keys(), [\"red\", \"blue\",", "in xrange(1, 11): histogram_summary.histogram( name, tensor * i, step=i, description=description", "run in result: for tag in result[run]: tensor_events = multiplexer.Tensors(run,", "(1, 91, 158), \"bottom-left\"), (\"yellow\", (239, 220, 111), \"bottom-right\"), ]", "multiplexer instead. start_times = { \"second_2\": 2.0, \"first\": 1.5, \"no_time\":", "list(\"abcdefg\") self.assertIsNot(actual, xs) self.assertEqual(actual, expected) def test_inorder(self): xs = list(range(10000))", "could be found\") else: return result multiplexer = FakeMultiplexer() provider", "an array of size 1 to a Python scalar\", ):", "graph_metadata.PLUGIN_NAME, histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME, scalar_metadata.PLUGIN_NAME, ], ) def test_list_runs(self): # We", "import metadata as image_metadata from tensorboard.plugins.image import summary_v2 as image_summary", "provider = data_provider.MultiplexerDataProvider( multiplexer, self.logdir ) run_tag_filter = base_provider.RunTagFilter( runs=[\"lebesgue\"],", "\"sine\"]) for run in result: for tag in result[run]: tensor_events", "self.assertIn(run, start_times) result = start_times[run] if result is None: raise", "self.subTest(\"reads all time series for a plugin\"): result = provider.read_blob_sequences(", "\"License\"); # you may not use this file except in", "def test_list_tensors_filters(self): provider = self.create_provider() # Quick check only, as", "b\"\\x89PNG\") self.assertStartsWith(blobs[3], b\"\\x89PNG\") blue1 = blobs[2] blue2 = blobs[3] red1", "\"square\", i ** 2, step=2 * i, description=\"boxen\" ) 
scalar_summary.scalar(\"cube\",", "multiplexer = FakeMultiplexer() provider = data_provider.MultiplexerDataProvider( multiplexer, \"fake_logdir\" ) result", "run_tag_filter=run_tag_filter, downsample=100, ) self.assertItemsEqual(result.keys(), [\"lebesgue\"]) self.assertItemsEqual(result[\"lebesgue\"].keys(), [\"uniform\", \"bimodal\"]) for run", "k=4) self.assertEqual(actual, expected) def test_underlong_ok(self): xs = list(\"abcdefg\") actual =", "graph: writer = tf.compat.v1.summary.FileWriter(self.logdir) writer.add_graph(graph) writer.flush() provider = self.create_provider() result", "V2 summary ops self.assertEqual(sample.description, \"very smooth\") def test_list_tensors_filters(self): provider =", "summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"bad\", tensor=[i, i], step=i, metadata=metadata ) logdir =", "tf.summary.create_file_writer(logdir).as_default(): data = [ (\"red\", (221, 28, 38), \"top-right\"), (\"blue\",", "tensorflow.compat.v1 as tf1 import tensorflow.compat.v2 as tf tf1.enable_eager_execution() class MultiplexerDataProviderTest(tf.test.TestCase):", "yet. 
with six.assertRaisesRegex( self, ValueError, \"can only convert an array", "as image_summary from tensorboard.util import tensor_util import tensorflow.compat.v1 as tf1", "from tensorboard.plugins.histogram import summary_v2 as histogram_summary from tensorboard.plugins.scalar import metadata", "self.create_provider() with self.subTest(\"reads all time series for a plugin\"): result", "metadata.plugin_data.plugin_name = \"marigraphs\" metadata.data_class = summary_pb2.DATA_CLASS_SCALAR tf.summary.write( \"high_tide\", tensor=i, step=i,", "4) # downsampled from 10 last = sample[-1] self.assertIsInstance(last, base_provider.BlobSequenceDatum)", "i) logdir = os.path.join(self.logdir, \"waves\") with tf.summary.create_file_writer(logdir).as_default(): for i in", "check only, as scalars and tensors use the same underlying", "raise ValueError(\"No event timestep could be found\") else: return result", "= context.RequestContext() logdir = os.path.join(self.logdir, \"polynomials\") with tf.summary.create_file_writer(logdir).as_default(): for i" ]
[ "eval_steps=eval_steps, **experiment_args ) return _experiment_fn if __name__ == '__main__': tf.logging.set_verbosity(tf.logging.INFO)", "can be found. train_batch_size: Batch size during training. train_batch_size: Batch", "arguments.pop('job_dir', None) arguments.pop('job-dir', None) output_dir = arguments.pop('output_dir') # Run the", ") parser.add_argument( '--hidden', help='Units in hidden layers.', default=(), nargs='+', type=int", ") parser.add_argument( '--train_batch_size', help='Batch size for training steps', type=int, default=100", "params = tf.contrib.training.HParams( cell_size=cell_size, hidden=hidden or None, # Default is", "eval_steps: Number of evaluation steps. cell_size: LSTM cell size. hidden:", "len(classes) params = tf.contrib.training.HParams( cell_size=cell_size, hidden=hidden or None, # Default", "of steps to run evalution for at each checkpoint', default=100,", "to run evalution for at each checkpoint', default=100, type=int )", "parameters. parser.add_argument( '--cell_size', help='LSTM cell size.', default=256, type=int ) parser.add_argument(", "the training job for.', type=int, default=10000 ) parser.add_argument( '--eval_steps', help='Number", "train_steps, eval_steps, cell_size, hidden, **experiment_args): \"\"\"Returns experiment_fn for a RNN", "for training on Cloud ML.\"\"\" import argparse, glob, os import", ") config = tf.contrib.learn.RunConfig() def _experiment_fn(output_dir): return tf.contrib.learn.Experiment( model.build_estimator(output_dir, n_classes,", "parser.add_argument( '--eval_delay_secs', help='How long to wait before running first evaluation',", "field, but it is required by gcloud', default='junk' ) parser.add_argument(", "wait before running first evaluation', default=10, type=int ) parser.add_argument( '--min_eval_frequency',", "help='LSTM cell size.', default=256, type=int ) parser.add_argument( '--hidden', help='Units in", "argparse.ArgumentParser() parser.add_argument( '--data_dir', help='GCS or local path 
to training data',", "\"use default\" wich is equivalent to [] -- see code", "= tf.contrib.learn.RunConfig() def _experiment_fn(output_dir): return tf.contrib.learn.Experiment( model.build_estimator(output_dir, n_classes, params, config),", "[] -- see code in model). experiment_args: Additional arguments when", "config), train_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'train-*'), batch_size=train_batch_size), eval_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'eval-*'), batch_size=eval_batch_size), export_strategies=[", "Args: data_dir: Where {train,eval}-* tf.train.Example datasets can be found. train_batch_size:", "argparse, glob, os import tensorflow as tf # From this", "'--eval_batch_size', help='Batch size for evaluation steps', type=int, default=100 ) parser.add_argument(", "<filename>extras/amld/cloud/quickdraw_rnn/task.py \"\"\"Experiment wrapper for training on Cloud ML.\"\"\" import argparse,", "parser.add_argument( '--data_dir', help='GCS or local path to training data', required=True", "help='GCS or local path to training data', required=True ) parser.add_argument(", "this package. import model def generate_experiment_fn(data_dir, train_batch_size, eval_batch_size, train_steps, eval_steps,", "model.build_estimator(output_dir, n_classes, params, config), train_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'train-*'), batch_size=train_batch_size), eval_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir,", "model.serving_input_fn, exports_to_keep=1) ], train_steps=train_steps, eval_steps=eval_steps, **experiment_args ) return _experiment_fn if", "**experiment_args ) return _experiment_fn if __name__ == '__main__': tf.logging.set_verbosity(tf.logging.INFO) parser", "to run the training job for.', type=int, default=10000 ) parser.add_argument(", "is empty list. 
) config = tf.contrib.learn.RunConfig() def _experiment_fn(output_dir): return", ") parser.add_argument( '--eval_steps', help='Number of steps to run evalution for", "help='How long to wait before running first evaluation', default=10, type=int", "os import tensorflow as tf # From this package. import", "size during evaluation. train_steps: Number of training steps. eval_steps: Number", "'--min_eval_frequency', help='Minimum number of training steps between evaluations', default=1, type=int", "Default is empty list. ) config = tf.contrib.learn.RunConfig() def _experiment_fn(output_dir):", "parser.add_argument( '--hidden', help='Units in hidden layers.', default=(), nargs='+', type=int )", "`tf.contrib.learn.Experiment` is instantiated. \"\"\" classes = tf.gfile.Open('%s/labels.txt' % data_dir).read().splitlines() n_classes", "args provided by service arguments.pop('job_dir', None) arguments.pop('job-dir', None) output_dir =", "__name__ == '__main__': tf.logging.set_verbosity(tf.logging.INFO) parser = argparse.ArgumentParser() parser.add_argument( '--data_dir', help='GCS", "cell_size: LSTM cell size. hidden: Number of units in hidden", "args.__dict__ # unused args provided by service arguments.pop('job_dir', None) arguments.pop('job-dir',", "default=10, type=int ) parser.add_argument( '--min_eval_frequency', help='Minimum number of training steps", "parser.add_argument( '--eval_batch_size', help='Batch size for evaluation steps', type=int, default=100 )", "evaluation steps', type=int, default=100 ) parser.add_argument( '--train_steps', help='Steps to run", "steps. cell_size: LSTM cell size. hidden: Number of units in", "args = parser.parse_args() arguments = args.__dict__ # unused args provided", "evaluations', default=1, type=int ) # Hyper parameters. parser.add_argument( '--cell_size', help='LSTM", "equivalent to [] -- see code in model). 
experiment_args: Additional", "def generate_experiment_fn(data_dir, train_batch_size, eval_batch_size, train_steps, eval_steps, cell_size, hidden, **experiment_args): \"\"\"Returns", "package. import model def generate_experiment_fn(data_dir, train_batch_size, eval_batch_size, train_steps, eval_steps, cell_size,", "it is required by gcloud', default='junk' ) parser.add_argument( '--eval_delay_secs', help='How", "parser.add_argument( '--train_steps', help='Steps to run the training job for.', type=int,", "default=10000 ) parser.add_argument( '--eval_steps', help='Number of steps to run evalution", "and export models', required=True ) parser.add_argument( '--job-dir', help='this model ignores", "LSTM cell size. hidden: Number of units in hidden layers", "be found. train_batch_size: Batch size during training. train_batch_size: Batch size", "running first evaluation', default=10, type=int ) parser.add_argument( '--min_eval_frequency', help='Minimum number", ") parser.add_argument( '--eval_delay_secs', help='How long to wait before running first", "params, config), train_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'train-*'), batch_size=train_batch_size), eval_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'eval-*'), batch_size=eval_batch_size),", "a RNN classifier. Args: data_dir: Where {train,eval}-* tf.train.Example datasets can", "files_pattern=os.path.join(data_dir, 'eval-*'), batch_size=eval_batch_size), export_strategies=[ tf.contrib.learn.utils.saved_model_export_utils.make_export_strategy( model.serving_input_fn, exports_to_keep=1) ], train_steps=train_steps, eval_steps=eval_steps,", "to wait before running first evaluation', default=10, type=int ) parser.add_argument(", "train_batch_size: Batch size during evaluation. train_steps: Number of training steps.", "training steps. eval_steps: Number of evaluation steps. 
cell_size: LSTM cell", "data', required=True ) parser.add_argument( '--train_batch_size', help='Batch size for training steps',", "that None means \"use default\" wich is equivalent to []", "return _experiment_fn if __name__ == '__main__': tf.logging.set_verbosity(tf.logging.INFO) parser = argparse.ArgumentParser()", "to [] -- see code in model). experiment_args: Additional arguments", "= argparse.ArgumentParser() parser.add_argument( '--data_dir', help='GCS or local path to training", "help='Batch size for training steps', type=int, default=100 ) parser.add_argument( '--eval_batch_size',", "size for evaluation steps', type=int, default=100 ) parser.add_argument( '--train_steps', help='Steps", "parser.add_argument( '--eval_steps', help='Number of steps to run evalution for at", "Batch size during evaluation. train_steps: Number of training steps. eval_steps:", "size during training. train_batch_size: Batch size during evaluation. train_steps: Number", ") parser.add_argument( '--train_steps', help='Steps to run the training job for.',", "config = tf.contrib.learn.RunConfig() def _experiment_fn(output_dir): return tf.contrib.learn.Experiment( model.build_estimator(output_dir, n_classes, params,", "model ignores this field, but it is required by gcloud',", "hidden layers (note that None means \"use default\" wich is", "to training data', required=True ) parser.add_argument( '--train_batch_size', help='Batch size for", ") args = parser.parse_args() arguments = args.__dict__ # unused args", "cell_size=cell_size, hidden=hidden or None, # Default is empty list. )", "arguments.pop('job-dir', None) output_dir = arguments.pop('output_dir') # Run the training job", "(note that None means \"use default\" wich is equivalent to", "for training steps', type=int, default=100 ) parser.add_argument( '--eval_batch_size', help='Batch size", "Number of evaluation steps. cell_size: LSTM cell size. 
hidden: Number", ") return _experiment_fn if __name__ == '__main__': tf.logging.set_verbosity(tf.logging.INFO) parser =", "location to write checkpoints and export models', required=True ) parser.add_argument(", "% data_dir).read().splitlines() n_classes = len(classes) params = tf.contrib.training.HParams( cell_size=cell_size, hidden=hidden", "RNN classifier. Args: data_dir: Where {train,eval}-* tf.train.Example datasets can be", "type=int ) args = parser.parse_args() arguments = args.__dict__ # unused", "checkpoints and export models', required=True ) parser.add_argument( '--job-dir', help='this model", "tensorflow as tf # From this package. import model def", "hidden: Number of units in hidden layers (note that None", ") parser.add_argument( '--job-dir', help='this model ignores this field, but it", "help='Units in hidden layers.', default=(), nargs='+', type=int ) args =", "type=int ) parser.add_argument( '--min_eval_frequency', help='Minimum number of training steps between", "\"\"\" classes = tf.gfile.Open('%s/labels.txt' % data_dir).read().splitlines() n_classes = len(classes) params", "'--train_steps', help='Steps to run the training job for.', type=int, default=10000", "tf.logging.set_verbosity(tf.logging.INFO) parser = argparse.ArgumentParser() parser.add_argument( '--data_dir', help='GCS or local path", ") parser.add_argument( '--min_eval_frequency', help='Minimum number of training steps between evaluations',", "import tensorflow as tf # From this package. import model", "import argparse, glob, os import tensorflow as tf # From", "in model). experiment_args: Additional arguments when `tf.contrib.learn.Experiment` is instantiated. 
\"\"\"", "= arguments.pop('output_dir') # Run the training job tf.contrib.learn.learn_runner.run( generate_experiment_fn(**arguments), output_dir)", "required=True ) parser.add_argument( '--train_batch_size', help='Batch size for training steps', type=int,", "at each checkpoint', default=100, type=int ) parser.add_argument( '--output_dir', help='GCS location", "'__main__': tf.logging.set_verbosity(tf.logging.INFO) parser = argparse.ArgumentParser() parser.add_argument( '--data_dir', help='GCS or local", "models', required=True ) parser.add_argument( '--job-dir', help='this model ignores this field,", "or local path to training data', required=True ) parser.add_argument( '--train_batch_size',", "help='Batch size for evaluation steps', type=int, default=100 ) parser.add_argument( '--train_steps',", "'--cell_size', help='LSTM cell size.', default=256, type=int ) parser.add_argument( '--hidden', help='Units", "output_dir = arguments.pop('output_dir') # Run the training job tf.contrib.learn.learn_runner.run( generate_experiment_fn(**arguments),", "steps', type=int, default=100 ) parser.add_argument( '--eval_batch_size', help='Batch size for evaluation", "evaluation steps. cell_size: LSTM cell size. hidden: Number of units", "or None, # Default is empty list. ) config =", "data_dir: Where {train,eval}-* tf.train.Example datasets can be found. train_batch_size: Batch", "as tf # From this package. import model def generate_experiment_fn(data_dir,", "**experiment_args): \"\"\"Returns experiment_fn for a RNN classifier. Args: data_dir: Where", "'--train_batch_size', help='Batch size for training steps', type=int, default=100 ) parser.add_argument(", "'train-*'), batch_size=train_batch_size), eval_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'eval-*'), batch_size=eval_batch_size), export_strategies=[ tf.contrib.learn.utils.saved_model_export_utils.make_export_strategy( model.serving_input_fn, exports_to_keep=1)", ") # Hyper parameters. 
parser.add_argument( '--cell_size', help='LSTM cell size.', default=256,", "glob, os import tensorflow as tf # From this package.", "_experiment_fn(output_dir): return tf.contrib.learn.Experiment( model.build_estimator(output_dir, n_classes, params, config), train_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'train-*'),", "hidden layers.', default=(), nargs='+', type=int ) args = parser.parse_args() arguments", "batch_size=train_batch_size), eval_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'eval-*'), batch_size=eval_batch_size), export_strategies=[ tf.contrib.learn.utils.saved_model_export_utils.make_export_strategy( model.serving_input_fn, exports_to_keep=1) ],", "is instantiated. \"\"\" classes = tf.gfile.Open('%s/labels.txt' % data_dir).read().splitlines() n_classes =", "tf.contrib.learn.RunConfig() def _experiment_fn(output_dir): return tf.contrib.learn.Experiment( model.build_estimator(output_dir, n_classes, params, config), train_input_fn=model.make_input_fn_stroke(", "n_classes, params, config), train_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'train-*'), batch_size=train_batch_size), eval_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'eval-*'),", "if __name__ == '__main__': tf.logging.set_verbosity(tf.logging.INFO) parser = argparse.ArgumentParser() parser.add_argument( '--data_dir',", "for evaluation steps', type=int, default=100 ) parser.add_argument( '--train_steps', help='Steps to", "help='Number of steps to run evalution for at each checkpoint',", "by service arguments.pop('job_dir', None) arguments.pop('job-dir', None) output_dir = arguments.pop('output_dir') #", "# Default is empty list. 
) config = tf.contrib.learn.RunConfig() def", "parser.add_argument( '--cell_size', help='LSTM cell size.', default=256, type=int ) parser.add_argument( '--hidden',", "model def generate_experiment_fn(data_dir, train_batch_size, eval_batch_size, train_steps, eval_steps, cell_size, hidden, **experiment_args):", "default=100 ) parser.add_argument( '--train_steps', help='Steps to run the training job", "each checkpoint', default=100, type=int ) parser.add_argument( '--output_dir', help='GCS location to", ") parser.add_argument( '--eval_batch_size', help='Batch size for evaluation steps', type=int, default=100", "unused args provided by service arguments.pop('job_dir', None) arguments.pop('job-dir', None) output_dir", "eval_batch_size, train_steps, eval_steps, cell_size, hidden, **experiment_args): \"\"\"Returns experiment_fn for a", "Batch size during training. train_batch_size: Batch size during evaluation. train_steps:", "arguments = args.__dict__ # unused args provided by service arguments.pop('job_dir',", "train_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'train-*'), batch_size=train_batch_size), eval_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'eval-*'), batch_size=eval_batch_size), export_strategies=[ tf.contrib.learn.utils.saved_model_export_utils.make_export_strategy(", "_experiment_fn if __name__ == '__main__': tf.logging.set_verbosity(tf.logging.INFO) parser = argparse.ArgumentParser() parser.add_argument(", "but it is required by gcloud', default='junk' ) parser.add_argument( '--eval_delay_secs',", "train_batch_size: Batch size during training. train_batch_size: Batch size during evaluation.", "is equivalent to [] -- see code in model). experiment_args:", "nargs='+', type=int ) args = parser.parse_args() arguments = args.__dict__ #", "# Hyper parameters. 
parser.add_argument( '--cell_size', help='LSTM cell size.', default=256, type=int", "default=100, type=int ) parser.add_argument( '--output_dir', help='GCS location to write checkpoints", "required by gcloud', default='junk' ) parser.add_argument( '--eval_delay_secs', help='How long to", "default='junk' ) parser.add_argument( '--eval_delay_secs', help='How long to wait before running", "None) output_dir = arguments.pop('output_dir') # Run the training job tf.contrib.learn.learn_runner.run(", "== '__main__': tf.logging.set_verbosity(tf.logging.INFO) parser = argparse.ArgumentParser() parser.add_argument( '--data_dir', help='GCS or", "path to training data', required=True ) parser.add_argument( '--train_batch_size', help='Batch size", "by gcloud', default='junk' ) parser.add_argument( '--eval_delay_secs', help='How long to wait", "wich is equivalent to [] -- see code in model).", "import model def generate_experiment_fn(data_dir, train_batch_size, eval_batch_size, train_steps, eval_steps, cell_size, hidden,", "default=(), nargs='+', type=int ) args = parser.parse_args() arguments = args.__dict__", "return tf.contrib.learn.Experiment( model.build_estimator(output_dir, n_classes, params, config), train_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'train-*'), batch_size=train_batch_size),", "def _experiment_fn(output_dir): return tf.contrib.learn.Experiment( model.build_estimator(output_dir, n_classes, params, config), train_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir,", "{train,eval}-* tf.train.Example datasets can be found. train_batch_size: Batch size during", "parser.parse_args() arguments = args.__dict__ # unused args provided by service", "hidden, **experiment_args): \"\"\"Returns experiment_fn for a RNN classifier. 
Args: data_dir:", "write checkpoints and export models', required=True ) parser.add_argument( '--job-dir', help='this", "type=int, default=100 ) parser.add_argument( '--train_steps', help='Steps to run the training", "of training steps. eval_steps: Number of evaluation steps. cell_size: LSTM", "experiment_args: Additional arguments when `tf.contrib.learn.Experiment` is instantiated. \"\"\" classes =", "evaluation', default=10, type=int ) parser.add_argument( '--min_eval_frequency', help='Minimum number of training", "= tf.gfile.Open('%s/labels.txt' % data_dir).read().splitlines() n_classes = len(classes) params = tf.contrib.training.HParams(", "\"\"\"Returns experiment_fn for a RNN classifier. Args: data_dir: Where {train,eval}-*", "type=int ) parser.add_argument( '--output_dir', help='GCS location to write checkpoints and", "= parser.parse_args() arguments = args.__dict__ # unused args provided by", "= args.__dict__ # unused args provided by service arguments.pop('job_dir', None)", "units in hidden layers (note that None means \"use default\"", "job for.', type=int, default=10000 ) parser.add_argument( '--eval_steps', help='Number of steps", "default=1, type=int ) # Hyper parameters. parser.add_argument( '--cell_size', help='LSTM cell", "parser.add_argument( '--train_batch_size', help='Batch size for training steps', type=int, default=100 )", "= len(classes) params = tf.contrib.training.HParams( cell_size=cell_size, hidden=hidden or None, #", "during training. train_batch_size: Batch size during evaluation. train_steps: Number of", "From this package. 
import model def generate_experiment_fn(data_dir, train_batch_size, eval_batch_size, train_steps,", "None) arguments.pop('job-dir', None) output_dir = arguments.pop('output_dir') # Run the training", "training steps', type=int, default=100 ) parser.add_argument( '--eval_batch_size', help='Batch size for", "'--eval_steps', help='Number of steps to run evalution for at each", "when `tf.contrib.learn.Experiment` is instantiated. \"\"\" classes = tf.gfile.Open('%s/labels.txt' % data_dir).read().splitlines()", "data_dir).read().splitlines() n_classes = len(classes) params = tf.contrib.training.HParams( cell_size=cell_size, hidden=hidden or", "to write checkpoints and export models', required=True ) parser.add_argument( '--job-dir',", "tf # From this package. import model def generate_experiment_fn(data_dir, train_batch_size,", "of units in hidden layers (note that None means \"use", "type=int, default=10000 ) parser.add_argument( '--eval_steps', help='Number of steps to run", "type=int ) parser.add_argument( '--hidden', help='Units in hidden layers.', default=(), nargs='+',", "size.', default=256, type=int ) parser.add_argument( '--hidden', help='Units in hidden layers.',", "'--hidden', help='Units in hidden layers.', default=(), nargs='+', type=int ) args", "Number of training steps. eval_steps: Number of evaluation steps. cell_size:", "between evaluations', default=1, type=int ) # Hyper parameters. parser.add_argument( '--cell_size',", "export_strategies=[ tf.contrib.learn.utils.saved_model_export_utils.make_export_strategy( model.serving_input_fn, exports_to_keep=1) ], train_steps=train_steps, eval_steps=eval_steps, **experiment_args ) return", "tf.contrib.learn.utils.saved_model_export_utils.make_export_strategy( model.serving_input_fn, exports_to_keep=1) ], train_steps=train_steps, eval_steps=eval_steps, **experiment_args ) return _experiment_fn", "hidden=hidden or None, # Default is empty list. ) config", "steps. eval_steps: Number of evaluation steps. 
cell_size: LSTM cell size.", "generate_experiment_fn(data_dir, train_batch_size, eval_batch_size, train_steps, eval_steps, cell_size, hidden, **experiment_args): \"\"\"Returns experiment_fn", "help='Steps to run the training job for.', type=int, default=10000 )", "default=100 ) parser.add_argument( '--eval_batch_size', help='Batch size for evaluation steps', type=int,", "list. ) config = tf.contrib.learn.RunConfig() def _experiment_fn(output_dir): return tf.contrib.learn.Experiment( model.build_estimator(output_dir,", "type=int, default=100 ) parser.add_argument( '--eval_batch_size', help='Batch size for evaluation steps',", "long to wait before running first evaluation', default=10, type=int )", "parser = argparse.ArgumentParser() parser.add_argument( '--data_dir', help='GCS or local path to", "export models', required=True ) parser.add_argument( '--job-dir', help='this model ignores this", "for a RNN classifier. Args: data_dir: Where {train,eval}-* tf.train.Example datasets", "'--data_dir', help='GCS or local path to training data', required=True )", "in hidden layers.', default=(), nargs='+', type=int ) args = parser.parse_args()", "first evaluation', default=10, type=int ) parser.add_argument( '--min_eval_frequency', help='Minimum number of", "this field, but it is required by gcloud', default='junk' )", "ignores this field, but it is required by gcloud', default='junk'", "help='GCS location to write checkpoints and export models', required=True )", "of training steps between evaluations', default=1, type=int ) # Hyper", "found. train_batch_size: Batch size during training. train_batch_size: Batch size during", "tf.contrib.training.HParams( cell_size=cell_size, hidden=hidden or None, # Default is empty list.", "steps', type=int, default=100 ) parser.add_argument( '--train_steps', help='Steps to run the", "Hyper parameters. 
parser.add_argument( '--cell_size', help='LSTM cell size.', default=256, type=int )", "provided by service arguments.pop('job_dir', None) arguments.pop('job-dir', None) output_dir = arguments.pop('output_dir')", "training on Cloud ML.\"\"\" import argparse, glob, os import tensorflow", "classifier. Args: data_dir: Where {train,eval}-* tf.train.Example datasets can be found.", ") parser.add_argument( '--output_dir', help='GCS location to write checkpoints and export", "\"\"\"Experiment wrapper for training on Cloud ML.\"\"\" import argparse, glob,", "tf.contrib.learn.Experiment( model.build_estimator(output_dir, n_classes, params, config), train_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'train-*'), batch_size=train_batch_size), eval_input_fn=model.make_input_fn_stroke(", "parser.add_argument( '--output_dir', help='GCS location to write checkpoints and export models',", "see code in model). experiment_args: Additional arguments when `tf.contrib.learn.Experiment` is", "experiment_fn for a RNN classifier. Args: data_dir: Where {train,eval}-* tf.train.Example", "arguments when `tf.contrib.learn.Experiment` is instantiated. \"\"\" classes = tf.gfile.Open('%s/labels.txt' %", "Additional arguments when `tf.contrib.learn.Experiment` is instantiated. 
\"\"\" classes = tf.gfile.Open('%s/labels.txt'", "], train_steps=train_steps, eval_steps=eval_steps, **experiment_args ) return _experiment_fn if __name__ ==", "size for training steps', type=int, default=100 ) parser.add_argument( '--eval_batch_size', help='Batch", "parser.add_argument( '--job-dir', help='this model ignores this field, but it is", "number of training steps between evaluations', default=1, type=int ) #", "layers.', default=(), nargs='+', type=int ) args = parser.parse_args() arguments =", "gcloud', default='junk' ) parser.add_argument( '--eval_delay_secs', help='How long to wait before", "'--eval_delay_secs', help='How long to wait before running first evaluation', default=10,", "ML.\"\"\" import argparse, glob, os import tensorflow as tf #", "Where {train,eval}-* tf.train.Example datasets can be found. train_batch_size: Batch size", "means \"use default\" wich is equivalent to [] -- see", "training job for.', type=int, default=10000 ) parser.add_argument( '--eval_steps', help='Number of", "None, # Default is empty list. ) config = tf.contrib.learn.RunConfig()", "cell_size, hidden, **experiment_args): \"\"\"Returns experiment_fn for a RNN classifier. Args:", "tf.gfile.Open('%s/labels.txt' % data_dir).read().splitlines() n_classes = len(classes) params = tf.contrib.training.HParams( cell_size=cell_size,", "train_batch_size, eval_batch_size, train_steps, eval_steps, cell_size, hidden, **experiment_args): \"\"\"Returns experiment_fn for", "is required by gcloud', default='junk' ) parser.add_argument( '--eval_delay_secs', help='How long", "during evaluation. train_steps: Number of training steps. eval_steps: Number of", "tf.train.Example datasets can be found. 
train_batch_size: Batch size during training.", "cell size.', default=256, type=int ) parser.add_argument( '--hidden', help='Units in hidden", "help='Minimum number of training steps between evaluations', default=1, type=int )", "in hidden layers (note that None means \"use default\" wich", "help='this model ignores this field, but it is required by", "default\" wich is equivalent to [] -- see code in", "training data', required=True ) parser.add_argument( '--train_batch_size', help='Batch size for training", "type=int ) # Hyper parameters. parser.add_argument( '--cell_size', help='LSTM cell size.',", "size. hidden: Number of units in hidden layers (note that", "code in model). experiment_args: Additional arguments when `tf.contrib.learn.Experiment` is instantiated.", "cell size. hidden: Number of units in hidden layers (note", "training. train_batch_size: Batch size during evaluation. train_steps: Number of training", "Number of units in hidden layers (note that None means", "service arguments.pop('job_dir', None) arguments.pop('job-dir', None) output_dir = arguments.pop('output_dir') # Run", "eval_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'eval-*'), batch_size=eval_batch_size), export_strategies=[ tf.contrib.learn.utils.saved_model_export_utils.make_export_strategy( model.serving_input_fn, exports_to_keep=1) ], train_steps=train_steps,", "'eval-*'), batch_size=eval_batch_size), export_strategies=[ tf.contrib.learn.utils.saved_model_export_utils.make_export_strategy( model.serving_input_fn, exports_to_keep=1) ], train_steps=train_steps, eval_steps=eval_steps, **experiment_args", "wrapper for training on Cloud ML.\"\"\" import argparse, glob, os", "run evalution for at each checkpoint', default=100, type=int ) parser.add_argument(", "evaluation. train_steps: Number of training steps. 
eval_steps: Number of evaluation", "classes = tf.gfile.Open('%s/labels.txt' % data_dir).read().splitlines() n_classes = len(classes) params =", "training steps between evaluations', default=1, type=int ) # Hyper parameters.", "train_steps=train_steps, eval_steps=eval_steps, **experiment_args ) return _experiment_fn if __name__ == '__main__':", "on Cloud ML.\"\"\" import argparse, glob, os import tensorflow as", "run the training job for.', type=int, default=10000 ) parser.add_argument( '--eval_steps',", "evalution for at each checkpoint', default=100, type=int ) parser.add_argument( '--output_dir',", "# From this package. import model def generate_experiment_fn(data_dir, train_batch_size, eval_batch_size,", "checkpoint', default=100, type=int ) parser.add_argument( '--output_dir', help='GCS location to write", "exports_to_keep=1) ], train_steps=train_steps, eval_steps=eval_steps, **experiment_args ) return _experiment_fn if __name__", "datasets can be found. train_batch_size: Batch size during training. train_batch_size:", "steps to run evalution for at each checkpoint', default=100, type=int", "None means \"use default\" wich is equivalent to [] --", "before running first evaluation', default=10, type=int ) parser.add_argument( '--min_eval_frequency', help='Minimum", "'--job-dir', help='this model ignores this field, but it is required", "train_steps: Number of training steps. eval_steps: Number of evaluation steps.", "instantiated. \"\"\" classes = tf.gfile.Open('%s/labels.txt' % data_dir).read().splitlines() n_classes = len(classes)", "of evaluation steps. cell_size: LSTM cell size. hidden: Number of", "= tf.contrib.training.HParams( cell_size=cell_size, hidden=hidden or None, # Default is empty", "layers (note that None means \"use default\" wich is equivalent", "# unused args provided by service arguments.pop('job_dir', None) arguments.pop('job-dir', None)", "steps between evaluations', default=1, type=int ) # Hyper parameters. 
parser.add_argument(", "files_pattern=os.path.join(data_dir, 'train-*'), batch_size=train_batch_size), eval_input_fn=model.make_input_fn_stroke( files_pattern=os.path.join(data_dir, 'eval-*'), batch_size=eval_batch_size), export_strategies=[ tf.contrib.learn.utils.saved_model_export_utils.make_export_strategy( model.serving_input_fn,", "batch_size=eval_batch_size), export_strategies=[ tf.contrib.learn.utils.saved_model_export_utils.make_export_strategy( model.serving_input_fn, exports_to_keep=1) ], train_steps=train_steps, eval_steps=eval_steps, **experiment_args )", "eval_steps, cell_size, hidden, **experiment_args): \"\"\"Returns experiment_fn for a RNN classifier.", "for.', type=int, default=10000 ) parser.add_argument( '--eval_steps', help='Number of steps to", "'--output_dir', help='GCS location to write checkpoints and export models', required=True", "model). experiment_args: Additional arguments when `tf.contrib.learn.Experiment` is instantiated. \"\"\" classes", "n_classes = len(classes) params = tf.contrib.training.HParams( cell_size=cell_size, hidden=hidden or None,", "required=True ) parser.add_argument( '--job-dir', help='this model ignores this field, but", "default=256, type=int ) parser.add_argument( '--hidden', help='Units in hidden layers.', default=(),", "Cloud ML.\"\"\" import argparse, glob, os import tensorflow as tf", "for at each checkpoint', default=100, type=int ) parser.add_argument( '--output_dir', help='GCS", "parser.add_argument( '--min_eval_frequency', help='Minimum number of training steps between evaluations', default=1,", "-- see code in model). experiment_args: Additional arguments when `tf.contrib.learn.Experiment`", "local path to training data', required=True ) parser.add_argument( '--train_batch_size', help='Batch", "empty list. ) config = tf.contrib.learn.RunConfig() def _experiment_fn(output_dir): return tf.contrib.learn.Experiment(" ]
[ "310 ms # Memory: 1664 KB n = int(input()) e", "# Time: 310 ms # Memory: 1664 KB n =", "in range(n): s =s- eval(input().replace(' ', '-')) e = max(e,", "n = int(input()) e = 0 s = 0 for", "= 0 for i in range(n): s =s- eval(input().replace(' ',", "range(n): s =s- eval(input().replace(' ', '-')) e = max(e, s)", "= 0 s = 0 for i in range(n): s", "i in range(n): s =s- eval(input().replace(' ', '-')) e =", "<filename>A/116A.py # Time: 310 ms # Memory: 1664 KB n", "e = 0 s = 0 for i in range(n):", "= int(input()) e = 0 s = 0 for i", "s =s- eval(input().replace(' ', '-')) e = max(e, s) print(e)", "Memory: 1664 KB n = int(input()) e = 0 s", "for i in range(n): s =s- eval(input().replace(' ', '-')) e", "Time: 310 ms # Memory: 1664 KB n = int(input())", "# Memory: 1664 KB n = int(input()) e = 0", "s = 0 for i in range(n): s =s- eval(input().replace('", "int(input()) e = 0 s = 0 for i in", "KB n = int(input()) e = 0 s = 0", "ms # Memory: 1664 KB n = int(input()) e =", "1664 KB n = int(input()) e = 0 s =", "0 for i in range(n): s =s- eval(input().replace(' ', '-'))", "0 s = 0 for i in range(n): s =s-" ]
[ "pyredner.get_device()), uvs = None, normals = None, material_id = 0)", "scene = scene, num_samples = 16, max_bounces = 1) render", "torch.tensor([0.0, 1.0, 0.0]), fov = torch.tensor([45.0]), # in degree clip_near", "1.0, -7.0]], device = pyredner.get_device()), indices = torch.tensor([[0, 1, 2],[1,", "= torch.tensor([0.0, 0.0, 0.0]), up = torch.tensor([0.0, 1.0, 0.0]), fov", "area_lights) scene_state_dict = scene.state_dict() scene = pyredner.Scene.load_state_dict(scene_state_dict) scene_args = pyredner.RenderFunction.serialize_scene(\\", "material_id = 0) shape_light = pyredner.Shape(\\ vertices = torch.tensor([[-1.0, -1.0,", "0) shape_light = pyredner.Shape(\\ vertices = torch.tensor([[-1.0, -1.0, -7.0], [", "dtype = torch.int32, device = pyredner.get_device()), uvs = None, normals", "0.0]), up = torch.tensor([0.0, 1.0, 0.0]), fov = torch.tensor([45.0]), #", "= [light] scene = pyredner.Scene(cam, shapes, materials, area_lights) scene_state_dict =", "[ 1.0, 1.0, -7.0]], device = pyredner.get_device()), indices = torch.tensor([[0,", "16, max_bounces = 1) render = pyredner.RenderFunction.apply img = render(0,", "shapes = [shape_triangle, shape_light] light = pyredner.AreaLight(shape_id = 1, intensity", "import numpy as np import torch cam = pyredner.Camera(position =", "device = pyredner.get_device()), indices = torch.tensor([[0, 1, 2],[1, 3, 2]],", "scene_state_dict = scene.state_dict() scene = pyredner.Scene.load_state_dict(scene_state_dict) scene_args = pyredner.RenderFunction.serialize_scene(\\ scene", "= pyredner.get_device()), indices = torch.tensor([[0, 1, 2]], dtype = torch.int32,", "None, normals = None, material_id = 0) shapes = [shape_triangle,", "= torch.tensor([20.0,20.0,20.0])) area_lights = [light] scene = pyredner.Scene(cam, shapes, materials,", "torch.tensor([45.0]), # in degree clip_near = 1e-2, # needs to", "= torch.int32, device = pyredner.get_device()), uvs = None, normals =", "torch.tensor([0.0, 0.0, -5.0]), look_at = torch.tensor([0.0, 0.0, 
0.0]), up =", "= 0) shapes = [shape_triangle, shape_light] light = pyredner.AreaLight(shape_id =", "vertices = torch.tensor([[-1.0, -1.0, -7.0], [ 1.0, -1.0, -7.0], [-1.0,", "0.0]), fov = torch.tensor([45.0]), # in degree clip_near = 1e-2,", "device = pyredner.get_device())) materials = [mat_grey] shape_triangle = pyredner.Shape(\\ vertices", "# in degree clip_near = 1e-2, # needs to >", "= pyredner.Camera(position = torch.tensor([0.0, 0.0, -5.0]), look_at = torch.tensor([0.0, 0.0,", "0 resolution = (256, 256), fisheye = False) mat_grey =", "[mat_grey] shape_triangle = pyredner.Shape(\\ vertices = torch.tensor([[-1.7, 1.0, 0.0], [1.0,", "= None, material_id = 0) shape_light = pyredner.Shape(\\ vertices =", "cam = pyredner.Camera(position = torch.tensor([0.0, 0.0, -5.0]), look_at = torch.tensor([0.0,", "None, material_id = 0) shape_light = pyredner.Shape(\\ vertices = torch.tensor([[-1.0,", "materials, area_lights) scene_state_dict = scene.state_dict() scene = pyredner.Scene.load_state_dict(scene_state_dict) scene_args =", "0.5], device = pyredner.get_device())) materials = [mat_grey] shape_triangle = pyredner.Shape(\\", "= None, normals = None, material_id = 0) shapes =", "shape_light = pyredner.Shape(\\ vertices = torch.tensor([[-1.0, -1.0, -7.0], [ 1.0,", "pyredner.AreaLight(shape_id = 1, intensity = torch.tensor([20.0,20.0,20.0])) area_lights = [light] scene", "None, normals = None, material_id = 0) shape_light = pyredner.Shape(\\", "shape_light] light = pyredner.AreaLight(shape_id = 1, intensity = torch.tensor([20.0,20.0,20.0])) area_lights", "= pyredner.Material(\\ diffuse_reflectance = \\ torch.tensor([0.5, 0.5, 0.5], device =", "[ 1.0, -1.0, -7.0], [-1.0, 1.0, -7.0], [ 1.0, 1.0,", "= torch.tensor([0.0, 1.0, 0.0]), fov = torch.tensor([45.0]), # in degree", "light = pyredner.AreaLight(shape_id = 1, intensity = torch.tensor([20.0,20.0,20.0])) area_lights =", "False) mat_grey = pyredner.Material(\\ diffuse_reflectance = \\ torch.tensor([0.5, 0.5, 0.5],", "1) 
render = pyredner.RenderFunction.apply img = render(0, *scene_args) pyredner.imwrite(img.cpu(), 'results/test_serialize/img.exr')", "0.0, -5.0]), look_at = torch.tensor([0.0, 0.0, 0.0]), up = torch.tensor([0.0,", "diffuse_reflectance = \\ torch.tensor([0.5, 0.5, 0.5], device = pyredner.get_device())) materials", "= scene, num_samples = 16, max_bounces = 1) render =", "= \\ torch.tensor([0.5, 0.5, 0.5], device = pyredner.get_device())) materials =", "0.0], [1.0, 1.0, 0.0], [-0.5, -1.0, 0.0]], device = pyredner.get_device()),", "= 1, intensity = torch.tensor([20.0,20.0,20.0])) area_lights = [light] scene =", "numpy as np import torch cam = pyredner.Camera(position = torch.tensor([0.0,", "[shape_triangle, shape_light] light = pyredner.AreaLight(shape_id = 1, intensity = torch.tensor([20.0,20.0,20.0]))", "materials = [mat_grey] shape_triangle = pyredner.Shape(\\ vertices = torch.tensor([[-1.7, 1.0,", "torch.tensor([20.0,20.0,20.0])) area_lights = [light] scene = pyredner.Scene(cam, shapes, materials, area_lights)", "torch cam = pyredner.Camera(position = torch.tensor([0.0, 0.0, -5.0]), look_at =", "0.0, 0.0]), up = torch.tensor([0.0, 1.0, 0.0]), fov = torch.tensor([45.0]),", "0.0]], device = pyredner.get_device()), indices = torch.tensor([[0, 1, 2]], dtype", "scene = pyredner.Scene(cam, shapes, materials, area_lights) scene_state_dict = scene.state_dict() scene", "pyredner.Scene(cam, shapes, materials, area_lights) scene_state_dict = scene.state_dict() scene = pyredner.Scene.load_state_dict(scene_state_dict)", "resolution = (256, 256), fisheye = False) mat_grey = pyredner.Material(\\", "1.0, 1.0, -7.0]], device = pyredner.get_device()), indices = torch.tensor([[0, 1,", "needs to > 0 resolution = (256, 256), fisheye =", "= pyredner.AreaLight(shape_id = 1, intensity = torch.tensor([20.0,20.0,20.0])) area_lights = [light]", "material_id = 0) shapes = [shape_triangle, shape_light] light = pyredner.AreaLight(shape_id", "import torch cam = pyredner.Camera(position = 
torch.tensor([0.0, 0.0, -5.0]), look_at", "scene_args = pyredner.RenderFunction.serialize_scene(\\ scene = scene, num_samples = 16, max_bounces", "= pyredner.Shape(\\ vertices = torch.tensor([[-1.7, 1.0, 0.0], [1.0, 1.0, 0.0],", "-7.0]], device = pyredner.get_device()), indices = torch.tensor([[0, 1, 2],[1, 3,", "indices = torch.tensor([[0, 1, 2],[1, 3, 2]], dtype = torch.int32,", "num_samples = 16, max_bounces = 1) render = pyredner.RenderFunction.apply img", "uvs = None, normals = None, material_id = 0) shape_light", "up = torch.tensor([0.0, 1.0, 0.0]), fov = torch.tensor([45.0]), # in", "torch.tensor([[-1.0, -1.0, -7.0], [ 1.0, -1.0, -7.0], [-1.0, 1.0, -7.0],", "= None, material_id = 0) shapes = [shape_triangle, shape_light] light", "= 1e-2, # needs to > 0 resolution = (256,", "normals = None, material_id = 0) shape_light = pyredner.Shape(\\ vertices", "look_at = torch.tensor([0.0, 0.0, 0.0]), up = torch.tensor([0.0, 1.0, 0.0]),", "= False) mat_grey = pyredner.Material(\\ diffuse_reflectance = \\ torch.tensor([0.5, 0.5,", "-7.0], [-1.0, 1.0, -7.0], [ 1.0, 1.0, -7.0]], device =", "torch.tensor([[0, 1, 2]], dtype = torch.int32, device = pyredner.get_device()), uvs", "area_lights = [light] scene = pyredner.Scene(cam, shapes, materials, area_lights) scene_state_dict", "= pyredner.get_device())) materials = [mat_grey] shape_triangle = pyredner.Shape(\\ vertices =", "-1.0, 0.0]], device = pyredner.get_device()), indices = torch.tensor([[0, 1, 2]],", "vertices = torch.tensor([[-1.7, 1.0, 0.0], [1.0, 1.0, 0.0], [-0.5, -1.0,", "mat_grey = pyredner.Material(\\ diffuse_reflectance = \\ torch.tensor([0.5, 0.5, 0.5], device", "pyredner.Shape(\\ vertices = torch.tensor([[-1.0, -1.0, -7.0], [ 1.0, -1.0, -7.0],", "= (256, 256), fisheye = False) mat_grey = pyredner.Material(\\ diffuse_reflectance", "= 16, max_bounces = 1) render = pyredner.RenderFunction.apply img =", "1, 2],[1, 3, 2]], dtype = torch.int32, device = pyredner.get_device()),", "pyredner.Camera(position = 
torch.tensor([0.0, 0.0, -5.0]), look_at = torch.tensor([0.0, 0.0, 0.0]),", "[light] scene = pyredner.Scene(cam, shapes, materials, area_lights) scene_state_dict = scene.state_dict()", "max_bounces = 1) render = pyredner.RenderFunction.apply img = render(0, *scene_args)", "torch.tensor([0.5, 0.5, 0.5], device = pyredner.get_device())) materials = [mat_grey] shape_triangle", "torch.tensor([[-1.7, 1.0, 0.0], [1.0, 1.0, 0.0], [-0.5, -1.0, 0.0]], device", "to > 0 resolution = (256, 256), fisheye = False)", "pyredner.Shape(\\ vertices = torch.tensor([[-1.7, 1.0, 0.0], [1.0, 1.0, 0.0], [-0.5,", "[-0.5, -1.0, 0.0]], device = pyredner.get_device()), indices = torch.tensor([[0, 1,", "1.0, 0.0], [-0.5, -1.0, 0.0]], device = pyredner.get_device()), indices =", "1, 2]], dtype = torch.int32, device = pyredner.get_device()), uvs =", "-5.0]), look_at = torch.tensor([0.0, 0.0, 0.0]), up = torch.tensor([0.0, 1.0,", "normals = None, material_id = 0) shapes = [shape_triangle, shape_light]", "pyredner.get_device()), indices = torch.tensor([[0, 1, 2]], dtype = torch.int32, device", "pyredner.get_device()), indices = torch.tensor([[0, 1, 2],[1, 3, 2]], dtype =", "\\ torch.tensor([0.5, 0.5, 0.5], device = pyredner.get_device())) materials = [mat_grey]", "device = pyredner.get_device()), uvs = None, normals = None, material_id", "scene, num_samples = 16, max_bounces = 1) render = pyredner.RenderFunction.apply", "1.0, -1.0, -7.0], [-1.0, 1.0, -7.0], [ 1.0, 1.0, -7.0]],", "= torch.tensor([[-1.7, 1.0, 0.0], [1.0, 1.0, 0.0], [-0.5, -1.0, 0.0]],", "= torch.tensor([[-1.0, -1.0, -7.0], [ 1.0, -1.0, -7.0], [-1.0, 1.0,", "(256, 256), fisheye = False) mat_grey = pyredner.Material(\\ diffuse_reflectance =", "= torch.tensor([[0, 1, 2],[1, 3, 2]], dtype = torch.int32, device", "device = pyredner.get_device()), indices = torch.tensor([[0, 1, 2]], dtype =", "pyredner.get_device())) materials = [mat_grey] shape_triangle = pyredner.Shape(\\ vertices = torch.tensor([[-1.7,", "1e-2, # needs to > 0 resolution 
= (256, 256),", "scene.state_dict() scene = pyredner.Scene.load_state_dict(scene_state_dict) scene_args = pyredner.RenderFunction.serialize_scene(\\ scene = scene,", "= pyredner.Shape(\\ vertices = torch.tensor([[-1.0, -1.0, -7.0], [ 1.0, -1.0,", "= None, normals = None, material_id = 0) shape_light =", "= torch.tensor([[0, 1, 2]], dtype = torch.int32, device = pyredner.get_device()),", "2]], dtype = torch.int32, device = pyredner.get_device()), uvs = None,", "= 0) shape_light = pyredner.Shape(\\ vertices = torch.tensor([[-1.0, -1.0, -7.0],", "np import torch cam = pyredner.Camera(position = torch.tensor([0.0, 0.0, -5.0]),", "indices = torch.tensor([[0, 1, 2]], dtype = torch.int32, device =", "> 0 resolution = (256, 256), fisheye = False) mat_grey", "0.5, 0.5], device = pyredner.get_device())) materials = [mat_grey] shape_triangle =", "[1.0, 1.0, 0.0], [-0.5, -1.0, 0.0]], device = pyredner.get_device()), indices", "-7.0], [ 1.0, 1.0, -7.0]], device = pyredner.get_device()), indices =", "= 1) render = pyredner.RenderFunction.apply img = render(0, *scene_args) pyredner.imwrite(img.cpu(),", "256), fisheye = False) mat_grey = pyredner.Material(\\ diffuse_reflectance = \\", "= [mat_grey] shape_triangle = pyredner.Shape(\\ vertices = torch.tensor([[-1.7, 1.0, 0.0],", "fov = torch.tensor([45.0]), # in degree clip_near = 1e-2, #", "None, material_id = 0) shapes = [shape_triangle, shape_light] light =", "torch.tensor([[0, 1, 2],[1, 3, 2]], dtype = torch.int32, device =", "torch.int32, device = pyredner.get_device()), uvs = None, normals = None,", "3, 2]], dtype = torch.int32, device = pyredner.get_device()), uvs =", "0.0], [-0.5, -1.0, 0.0]], device = pyredner.get_device()), indices = torch.tensor([[0,", "= torch.tensor([45.0]), # in degree clip_near = 1e-2, # needs", "-1.0, -7.0], [-1.0, 1.0, -7.0], [ 1.0, 1.0, -7.0]], device", "degree clip_near = 1e-2, # needs to > 0 resolution", "pyredner.RenderFunction.serialize_scene(\\ scene = scene, num_samples = 16, max_bounces = 
1)", "pyredner.Material(\\ diffuse_reflectance = \\ torch.tensor([0.5, 0.5, 0.5], device = pyredner.get_device()))", "torch.tensor([0.0, 0.0, 0.0]), up = torch.tensor([0.0, 1.0, 0.0]), fov =", "= pyredner.Scene(cam, shapes, materials, area_lights) scene_state_dict = scene.state_dict() scene =", "1.0, 0.0]), fov = torch.tensor([45.0]), # in degree clip_near =", "scene = pyredner.Scene.load_state_dict(scene_state_dict) scene_args = pyredner.RenderFunction.serialize_scene(\\ scene = scene, num_samples", "2],[1, 3, 2]], dtype = torch.int32, device = pyredner.get_device()), uvs", "import pyredner import numpy as np import torch cam =", "clip_near = 1e-2, # needs to > 0 resolution =", "= pyredner.Scene.load_state_dict(scene_state_dict) scene_args = pyredner.RenderFunction.serialize_scene(\\ scene = scene, num_samples =", "shape_triangle = pyredner.Shape(\\ vertices = torch.tensor([[-1.7, 1.0, 0.0], [1.0, 1.0,", "-1.0, -7.0], [ 1.0, -1.0, -7.0], [-1.0, 1.0, -7.0], [", "as np import torch cam = pyredner.Camera(position = torch.tensor([0.0, 0.0,", "= [shape_triangle, shape_light] light = pyredner.AreaLight(shape_id = 1, intensity =", "pyredner.Scene.load_state_dict(scene_state_dict) scene_args = pyredner.RenderFunction.serialize_scene(\\ scene = scene, num_samples = 16,", "= scene.state_dict() scene = pyredner.Scene.load_state_dict(scene_state_dict) scene_args = pyredner.RenderFunction.serialize_scene(\\ scene =", "1.0, -7.0], [ 1.0, 1.0, -7.0]], device = pyredner.get_device()), indices", "intensity = torch.tensor([20.0,20.0,20.0])) area_lights = [light] scene = pyredner.Scene(cam, shapes,", "1.0, 0.0], [1.0, 1.0, 0.0], [-0.5, -1.0, 0.0]], device =", "0) shapes = [shape_triangle, shape_light] light = pyredner.AreaLight(shape_id = 1,", "= torch.tensor([0.0, 0.0, -5.0]), look_at = torch.tensor([0.0, 0.0, 0.0]), up", "-7.0], [ 1.0, -1.0, -7.0], [-1.0, 1.0, -7.0], [ 1.0,", "pyredner import numpy as np import torch cam = pyredner.Camera(position", "# needs to > 0 resolution = 
(256, 256), fisheye", "= pyredner.get_device()), indices = torch.tensor([[0, 1, 2],[1, 3, 2]], dtype", "= pyredner.RenderFunction.serialize_scene(\\ scene = scene, num_samples = 16, max_bounces =", "1, intensity = torch.tensor([20.0,20.0,20.0])) area_lights = [light] scene = pyredner.Scene(cam,", "in degree clip_near = 1e-2, # needs to > 0", "uvs = None, normals = None, material_id = 0) shapes", "= pyredner.get_device()), uvs = None, normals = None, material_id =", "shapes, materials, area_lights) scene_state_dict = scene.state_dict() scene = pyredner.Scene.load_state_dict(scene_state_dict) scene_args", "[-1.0, 1.0, -7.0], [ 1.0, 1.0, -7.0]], device = pyredner.get_device()),", "fisheye = False) mat_grey = pyredner.Material(\\ diffuse_reflectance = \\ torch.tensor([0.5," ]
[ "AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. #", "DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES", "<reponame>Shoobx/zope.publisher ############################################################################## # # Copyright (c) 2001, 2002 Zope Foundation", "(ZPL). A copy of the ZPL should accompany this distribution.", "A copy of the ZPL should accompany this distribution. #", "zope.publisher.base \\ import RequestDataProperty, RequestDataGetter, RequestDataMapper class TestDataGettr(RequestDataGetter): _gettrname =", "provisions of the Zope Public License, # Version 2.1 (ZPL).", "else: raise AssertionError(\"Shouldn't be able to assign\") try: data.somedata =", "\"something Zope\"}, [\"spam\"]) def testRequestDataMapper(self): data = Data() sample =", "testIEnumerableMapping, testIReadMapping from zope.publisher.base \\ import RequestDataProperty, RequestDataGetter, RequestDataMapper class", "PURPOSE. # ############################################################################## \"\"\"Request Data-Property Tests \"\"\" from unittest import", "All Rights Reserved. # # This software is subject to", "RequestDataGetter, RequestDataMapper class TestDataGettr(RequestDataGetter): _gettrname = 'getSomething' class TestDataMapper(RequestDataMapper): _mapname", "IS PROVIDED \"AS IS\" AND ANY AND ALL EXPRESS OR", "Contributors. # All Rights Reserved. 
# # This software is", "testIReadMapping(self, inst, sample, [\"spam\"]) testIEnumerableMapping(self, inst, sample) def testNoAssign(self): data", "THIS SOFTWARE IS PROVIDED \"AS IS\" AND ANY AND ALL", "\"something %s\" % name if default is not _marker: return", "is subject to the provisions of the Zope Public License,", "Data() sample = {'foo': 'Foo', 'bar': 'Bar'} data._data = sample", "sample, [\"spam\"]) testIEnumerableMapping(self, inst, sample) def testNoAssign(self): data = Data()", "else: raise AssertionError(\"Shouldn't be able to assign\") def test_suite(): return", "default=_marker): if name.startswith('Z'): return \"something %s\" % name if default", "should accompany this distribution. # THIS SOFTWARE IS PROVIDED \"AS", "name if default is not _marker: return default raise KeyError(name)", "to the provisions of the Zope Public License, # Version", "AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING,", "import testIEnumerableMapping, testIReadMapping from zope.publisher.base \\ import RequestDataProperty, RequestDataGetter, RequestDataMapper", "the provisions of the Zope Public License, # Version 2.1", "copy of the ZPL should accompany this distribution. # THIS", "(c) 2001, 2002 Zope Foundation and Contributors. 
# All Rights", "if default is not _marker: return default raise KeyError(name) something", "except AttributeError: pass else: raise AssertionError(\"Shouldn't be able to assign\")", "inst, sample, [\"spam\"]) testIEnumerableMapping(self, inst, sample) def testNoAssign(self): data =", "OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A", "RequestDataMapper class TestDataGettr(RequestDataGetter): _gettrname = 'getSomething' class TestDataMapper(RequestDataMapper): _mapname =", "zope.interface.common.tests.basemapping \\ import testIEnumerableMapping, testIReadMapping from zope.publisher.base \\ import RequestDataProperty,", "BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE,", "% name if default is not _marker: return default raise", "Reserved. # # This software is subject to the provisions", "{} except AttributeError: pass else: raise AssertionError(\"Shouldn't be able to", "{\"Zope\": \"something Zope\"}, [\"spam\"]) def testRequestDataMapper(self): data = Data() sample", "2002 Zope Foundation and Contributors. # All Rights Reserved. #", "[\"spam\"]) def testRequestDataMapper(self): data = Data() sample = {'foo': 'Foo',", "raise KeyError(name) something = RequestDataProperty(TestDataGettr) somedata = RequestDataProperty(TestDataMapper) class Test(TestCase):", "Copyright (c) 2001, 2002 Zope Foundation and Contributors. 
# All", "try: data.something = {} except AttributeError: pass else: raise AssertionError(\"Shouldn't", "data = Data() try: data.something = {} except AttributeError: pass", "Data(object): def getSomething(self, name, default=_marker): if name.startswith('Z'): return \"something %s\"", "WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED", "'bar': 'Bar'} data._data = sample inst = data.somedata testIReadMapping(self, inst,", "from unittest import TestCase, makeSuite from zope.interface.common.tests.basemapping \\ import testIEnumerableMapping,", "_mapname = '_data' _marker = object() class Data(object): def getSomething(self,", "object() class Data(object): def getSomething(self, name, default=_marker): if name.startswith('Z'): return", "RequestDataProperty(TestDataMapper) class Test(TestCase): def testRequestDataGettr(self): testIReadMapping(self, Data().something, {\"Zope\": \"something Zope\"},", "distribution. # THIS SOFTWARE IS PROVIDED \"AS IS\" AND ANY", "of the ZPL should accompany this distribution. # THIS SOFTWARE", "IS\" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES", "IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO,", "'Foo', 'bar': 'Bar'} data._data = sample inst = data.somedata testIReadMapping(self,", "AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE", "# FOR A PARTICULAR PURPOSE. # ############################################################################## \"\"\"Request Data-Property Tests", "'_data' _marker = object() class Data(object): def getSomething(self, name, default=_marker):", "Zope\"}, [\"spam\"]) def testRequestDataMapper(self): data = Data() sample = {'foo':", "= {} except AttributeError: pass else: raise AssertionError(\"Shouldn't be able", "subject to the provisions of the Zope Public License, #", "# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS #", "INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. 
# ##############################################################################", "testRequestDataGettr(self): testIReadMapping(self, Data().something, {\"Zope\": \"something Zope\"}, [\"spam\"]) def testRequestDataMapper(self): data", "raise AssertionError(\"Shouldn't be able to assign\") def test_suite(): return makeSuite(Test)", "\\ import testIEnumerableMapping, testIReadMapping from zope.publisher.base \\ import RequestDataProperty, RequestDataGetter,", "from zope.interface.common.tests.basemapping \\ import testIEnumerableMapping, testIReadMapping from zope.publisher.base \\ import", "# # This software is subject to the provisions of", "= sample inst = data.somedata testIReadMapping(self, inst, sample, [\"spam\"]) testIEnumerableMapping(self,", "inst, sample) def testNoAssign(self): data = Data() try: data.something =", "= Data() try: data.something = {} except AttributeError: pass else:", "'Bar'} data._data = sample inst = data.somedata testIReadMapping(self, inst, sample,", "and Contributors. # All Rights Reserved. # # This software", "PROVIDED \"AS IS\" AND ANY AND ALL EXPRESS OR IMPLIED", "AttributeError: pass else: raise AssertionError(\"Shouldn't be able to assign\") try:", "to assign\") try: data.somedata = {} except AttributeError: pass else:", "class TestDataMapper(RequestDataMapper): _mapname = '_data' _marker = object() class Data(object):", "PARTICULAR PURPOSE. # ############################################################################## \"\"\"Request Data-Property Tests \"\"\" from unittest", "ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED #", "# This software is subject to the provisions of the", "accompany this distribution. 
# THIS SOFTWARE IS PROVIDED \"AS IS\"", "ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT", "default is not _marker: return default raise KeyError(name) something =", "data.somedata = {} except AttributeError: pass else: raise AssertionError(\"Shouldn't be", "THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND", "Data() try: data.something = {} except AttributeError: pass else: raise", "= RequestDataProperty(TestDataGettr) somedata = RequestDataProperty(TestDataMapper) class Test(TestCase): def testRequestDataGettr(self): testIReadMapping(self,", "_marker: return default raise KeyError(name) something = RequestDataProperty(TestDataGettr) somedata =", "getSomething(self, name, default=_marker): if name.startswith('Z'): return \"something %s\" % name", "Data-Property Tests \"\"\" from unittest import TestCase, makeSuite from zope.interface.common.tests.basemapping", "############################################################################## \"\"\"Request Data-Property Tests \"\"\" from unittest import TestCase, makeSuite", "this distribution. # THIS SOFTWARE IS PROVIDED \"AS IS\" AND", "makeSuite from zope.interface.common.tests.basemapping \\ import testIEnumerableMapping, testIReadMapping from zope.publisher.base \\", "inst = data.somedata testIReadMapping(self, inst, sample, [\"spam\"]) testIEnumerableMapping(self, inst, sample)", "Tests \"\"\" from unittest import TestCase, makeSuite from zope.interface.common.tests.basemapping \\", "Rights Reserved. 
# # This software is subject to the", "import RequestDataProperty, RequestDataGetter, RequestDataMapper class TestDataGettr(RequestDataGetter): _gettrname = 'getSomething' class", "= '_data' _marker = object() class Data(object): def getSomething(self, name,", "LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST", "def testRequestDataGettr(self): testIReadMapping(self, Data().something, {\"Zope\": \"something Zope\"}, [\"spam\"]) def testRequestDataMapper(self):", "AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## \"\"\"Request", "\"\"\" from unittest import TestCase, makeSuite from zope.interface.common.tests.basemapping \\ import", "RequestDataProperty, RequestDataGetter, RequestDataMapper class TestDataGettr(RequestDataGetter): _gettrname = 'getSomething' class TestDataMapper(RequestDataMapper):", "FOR A PARTICULAR PURPOSE. # ############################################################################## \"\"\"Request Data-Property Tests \"\"\"", "############################################################################## # # Copyright (c) 2001, 2002 Zope Foundation and", "name.startswith('Z'): return \"something %s\" % name if default is not", "class Data(object): def getSomething(self, name, default=_marker): if name.startswith('Z'): return \"something", "is not _marker: return default raise KeyError(name) something = RequestDataProperty(TestDataGettr)", "MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE.", "_gettrname = 'getSomething' class TestDataMapper(RequestDataMapper): _mapname = '_data' _marker =", "testRequestDataMapper(self): data = Data() sample = {'foo': 'Foo', 'bar': 'Bar'}", "def testNoAssign(self): data = Data() try: data.something = {} except", "= 'getSomething' class TestDataMapper(RequestDataMapper): _mapname = '_data' _marker = object()", "Zope Foundation and Contributors. # All Rights Reserved. 
# #", "data = Data() sample = {'foo': 'Foo', 'bar': 'Bar'} data._data", "not _marker: return default raise KeyError(name) something = RequestDataProperty(TestDataGettr) somedata", "\"AS IS\" AND ANY AND ALL EXPRESS OR IMPLIED #", "class Test(TestCase): def testRequestDataGettr(self): testIReadMapping(self, Data().something, {\"Zope\": \"something Zope\"}, [\"spam\"])", "sample) def testNoAssign(self): data = Data() try: data.something = {}", "SOFTWARE IS PROVIDED \"AS IS\" AND ANY AND ALL EXPRESS", "{'foo': 'Foo', 'bar': 'Bar'} data._data = sample inst = data.somedata", "be able to assign\") try: data.somedata = {} except AttributeError:", "name, default=_marker): if name.startswith('Z'): return \"something %s\" % name if", "return default raise KeyError(name) something = RequestDataProperty(TestDataGettr) somedata = RequestDataProperty(TestDataMapper)", "able to assign\") try: data.somedata = {} except AttributeError: pass", "ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED,", "RequestDataProperty(TestDataGettr) somedata = RequestDataProperty(TestDataMapper) class Test(TestCase): def testRequestDataGettr(self): testIReadMapping(self, Data().something,", "TestCase, makeSuite from zope.interface.common.tests.basemapping \\ import testIEnumerableMapping, testIReadMapping from zope.publisher.base", "= object() class Data(object): def getSomething(self, name, default=_marker): if name.startswith('Z'):", "class TestDataGettr(RequestDataGetter): _gettrname = 'getSomething' class TestDataMapper(RequestDataMapper): _mapname = '_data'", "unittest import TestCase, makeSuite from zope.interface.common.tests.basemapping \\ import testIEnumerableMapping, testIReadMapping", "Data().something, {\"Zope\": \"something Zope\"}, [\"spam\"]) def testRequestDataMapper(self): data = Data()", "of the Zope Public License, # Version 2.1 (ZPL). 
A", "data.somedata testIReadMapping(self, inst, sample, [\"spam\"]) testIEnumerableMapping(self, inst, sample) def testNoAssign(self):", "# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE", "TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR", "def testRequestDataMapper(self): data = Data() sample = {'foo': 'Foo', 'bar':", "data.something = {} except AttributeError: pass else: raise AssertionError(\"Shouldn't be", "Public License, # Version 2.1 (ZPL). A copy of the", "2.1 (ZPL). A copy of the ZPL should accompany this", "2001, 2002 Zope Foundation and Contributors. # All Rights Reserved.", "= data.somedata testIReadMapping(self, inst, sample, [\"spam\"]) testIEnumerableMapping(self, inst, sample) def", "This software is subject to the provisions of the Zope", "somedata = RequestDataProperty(TestDataMapper) class Test(TestCase): def testRequestDataGettr(self): testIReadMapping(self, Data().something, {\"Zope\":", "_marker = object() class Data(object): def getSomething(self, name, default=_marker): if", "software is subject to the provisions of the Zope Public", "\"\"\"Request Data-Property Tests \"\"\" from unittest import TestCase, makeSuite from", "Test(TestCase): def testRequestDataGettr(self): testIReadMapping(self, Data().something, {\"Zope\": \"something Zope\"}, [\"spam\"]) def", "ZPL should accompany this distribution. 
# THIS SOFTWARE IS PROVIDED", "sample inst = data.somedata testIReadMapping(self, inst, sample, [\"spam\"]) testIEnumerableMapping(self, inst,", "TestDataMapper(RequestDataMapper): _mapname = '_data' _marker = object() class Data(object): def", "from zope.publisher.base \\ import RequestDataProperty, RequestDataGetter, RequestDataMapper class TestDataGettr(RequestDataGetter): _gettrname", "'getSomething' class TestDataMapper(RequestDataMapper): _mapname = '_data' _marker = object() class", "pass else: raise AssertionError(\"Shouldn't be able to assign\") def test_suite():", "the ZPL should accompany this distribution. # THIS SOFTWARE IS", "def getSomething(self, name, default=_marker): if name.startswith('Z'): return \"something %s\" %", "= {'foo': 'Foo', 'bar': 'Bar'} data._data = sample inst =", "AttributeError: pass else: raise AssertionError(\"Shouldn't be able to assign\") def", "testNoAssign(self): data = Data() try: data.something = {} except AttributeError:", "INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF", "[\"spam\"]) testIEnumerableMapping(self, inst, sample) def testNoAssign(self): data = Data() try:", "# Version 2.1 (ZPL). A copy of the ZPL should", "assign\") try: data.somedata = {} except AttributeError: pass else: raise", "testIReadMapping from zope.publisher.base \\ import RequestDataProperty, RequestDataGetter, RequestDataMapper class TestDataGettr(RequestDataGetter):", "raise AssertionError(\"Shouldn't be able to assign\") try: data.somedata = {}", "NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY,", "# All Rights Reserved. 
# # This software is subject", "data._data = sample inst = data.somedata testIReadMapping(self, inst, sample, [\"spam\"])", "if name.startswith('Z'): return \"something %s\" % name if default is", "import TestCase, makeSuite from zope.interface.common.tests.basemapping \\ import testIEnumerableMapping, testIReadMapping from", "%s\" % name if default is not _marker: return default", "# # Copyright (c) 2001, 2002 Zope Foundation and Contributors.", "# ############################################################################## \"\"\"Request Data-Property Tests \"\"\" from unittest import TestCase,", "KeyError(name) something = RequestDataProperty(TestDataGettr) somedata = RequestDataProperty(TestDataMapper) class Test(TestCase): def", "AssertionError(\"Shouldn't be able to assign\") try: data.somedata = {} except", "default raise KeyError(name) something = RequestDataProperty(TestDataGettr) somedata = RequestDataProperty(TestDataMapper) class", "FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## \"\"\"Request Data-Property", "pass else: raise AssertionError(\"Shouldn't be able to assign\") try: data.somedata", "Zope Public License, # Version 2.1 (ZPL). A copy of", "sample = {'foo': 'Foo', 'bar': 'Bar'} data._data = sample inst", "OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED", "TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT,", "TestDataGettr(RequestDataGetter): _gettrname = 'getSomething' class TestDataMapper(RequestDataMapper): _mapname = '_data' _marker", "testIReadMapping(self, Data().something, {\"Zope\": \"something Zope\"}, [\"spam\"]) def testRequestDataMapper(self): data =", "= Data() sample = {'foo': 'Foo', 'bar': 'Bar'} data._data =", "Version 2.1 (ZPL). 
A copy of the ZPL should accompany", "something = RequestDataProperty(TestDataGettr) somedata = RequestDataProperty(TestDataMapper) class Test(TestCase): def testRequestDataGettr(self):", "EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT", "testIEnumerableMapping(self, inst, sample) def testNoAssign(self): data = Data() try: data.something", "A PARTICULAR PURPOSE. # ############################################################################## \"\"\"Request Data-Property Tests \"\"\" from", "return \"something %s\" % name if default is not _marker:", "= RequestDataProperty(TestDataMapper) class Test(TestCase): def testRequestDataGettr(self): testIReadMapping(self, Data().something, {\"Zope\": \"something", "Foundation and Contributors. # All Rights Reserved. # # This", "License, # Version 2.1 (ZPL). A copy of the ZPL", "try: data.somedata = {} except AttributeError: pass else: raise AssertionError(\"Shouldn't", "the Zope Public License, # Version 2.1 (ZPL). A copy", "# THIS SOFTWARE IS PROVIDED \"AS IS\" AND ANY AND", "WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR", "IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS", "\\ import RequestDataProperty, RequestDataGetter, RequestDataMapper class TestDataGettr(RequestDataGetter): _gettrname = 'getSomething'", "# Copyright (c) 2001, 2002 Zope Foundation and Contributors. #" ]
[ "\"\"\" from os import path import sys sys.path.append( path.abspath(path.join('tools', 'validators',", "from os import path import sys sys.path.append( path.abspath(path.join('tools', 'validators', 'instance_validator')))", "Enable import \"\"\" from os import path import sys sys.path.append(", "import \"\"\" from os import path import sys sys.path.append( path.abspath(path.join('tools',", "<reponame>ahemphill/digitalbuildings \"\"\" Enable import \"\"\" from os import path import", "\"\"\" Enable import \"\"\" from os import path import sys" ]
[ "def __init__( self, cache: Cache, compress: Optional[Callable[[str], AnyStr]] = ...,", "from zlib import compress as default_compress, decompress as default_decompress from", "elif decompress is ...: self._decompress = self._default_decompress else: self._decompress =", "decompress: Optional[Callable[[AnyStr], str]] = ..., ) -> None: super().__init__() self._cache", "self._compress = compress if decompress is None: self._decompress = self._noop", "set(self, key, value, expiration: int) -> None: self._cache.set(key, self._compress(value), expiration)", "CacheCompressionDecorator(Cache): def __init__( self, cache: Cache, compress: Optional[Callable[[str], AnyStr]] =", "compress is None: self._compress = self._noop elif compress is ...:", "is None: self._decompress = self._noop elif decompress is ...: self._decompress", "decompress is ...: self._decompress = self._default_decompress else: self._decompress = decompress", "expiration: int) -> None: self._cache.set(key, self._compress(value), expiration) @staticmethod def _noop(x):", "zlib import compress as default_compress, decompress as default_decompress from .cache", "from .cache import Cache from ..constants import NOT_FOUND class CacheCompressionDecorator(Cache):", "Optional[Callable[[AnyStr], str]] = ..., ) -> None: super().__init__() self._cache =", "...: self._decompress = self._default_decompress else: self._decompress = decompress def get(self,", "compress is ...: self._compress = self._default_compress else: self._compress = compress", "= self._noop elif decompress is ...: self._decompress = self._default_decompress else:", "compress: Optional[Callable[[str], AnyStr]] = ..., decompress: Optional[Callable[[AnyStr], str]] = ...,", "import Callable, AnyStr, Optional from zlib import compress as default_compress,", "@staticmethod def _noop(x): return x @staticmethod def _default_compress(obj: str) ->", "self._compress = self._default_compress else: self._compress = compress if decompress is", "= 
self._default_compress else: self._compress = compress if decompress is None:", "as default_compress, decompress as default_decompress from .cache import Cache from", "def _default_compress(obj: str) -> bytes: return default_compress(obj.encode(\"UTF-8\")) @staticmethod def _default_decompress(data:", "= ..., decompress: Optional[Callable[[AnyStr], str]] = ..., ) -> None:", "from typing import Callable, AnyStr, Optional from zlib import compress", "return value if value is NOT_FOUND else self._decompress(value) def set(self,", "Cache, compress: Optional[Callable[[str], AnyStr]] = ..., decompress: Optional[Callable[[AnyStr], str]] =", "self._noop elif decompress is ...: self._decompress = self._default_decompress else: self._decompress", "= ..., ) -> None: super().__init__() self._cache = cache if", "...: self._compress = self._default_compress else: self._compress = compress if decompress", "x @staticmethod def _default_compress(obj: str) -> bytes: return default_compress(obj.encode(\"UTF-8\")) @staticmethod", "self._cache.set(key, self._compress(value), expiration) @staticmethod def _noop(x): return x @staticmethod def", "..., decompress: Optional[Callable[[AnyStr], str]] = ..., ) -> None: super().__init__()", "else self._decompress(value) def set(self, key, value, expiration: int) -> None:", "= decompress def get(self, key): value = self._cache.get(key) return value", "str]] = ..., ) -> None: super().__init__() self._cache = cache", "_noop(x): return x @staticmethod def _default_compress(obj: str) -> bytes: return", "value if value is NOT_FOUND else self._decompress(value) def set(self, key,", "def _noop(x): return x @staticmethod def _default_compress(obj: str) -> bytes:", "__init__( self, cache: Cache, compress: Optional[Callable[[str], AnyStr]] = ..., decompress:", "= compress if decompress is None: self._decompress = self._noop elif", "Optional[Callable[[str], AnyStr]] = ..., decompress: Optional[Callable[[AnyStr], str]] = ..., )", "decompress is None: 
self._decompress = self._noop elif decompress is ...:", "= self._cache.get(key) return value if value is NOT_FOUND else self._decompress(value)", "from ..constants import NOT_FOUND class CacheCompressionDecorator(Cache): def __init__( self, cache:", "return x @staticmethod def _default_compress(obj: str) -> bytes: return default_compress(obj.encode(\"UTF-8\"))", "is NOT_FOUND else self._decompress(value) def set(self, key, value, expiration: int)", "import NOT_FOUND class CacheCompressionDecorator(Cache): def __init__( self, cache: Cache, compress:", "else: self._compress = compress if decompress is None: self._decompress =", "is ...: self._decompress = self._default_decompress else: self._decompress = decompress def", "= self._noop elif compress is ...: self._compress = self._default_compress else:", "..constants import NOT_FOUND class CacheCompressionDecorator(Cache): def __init__( self, cache: Cache,", "return default_compress(obj.encode(\"UTF-8\")) @staticmethod def _default_decompress(data: bytes) -> str: return default_decompress(data).decode(\"UTF-8\")", "compress as default_compress, decompress as default_decompress from .cache import Cache", "key, value, expiration: int) -> None: self._cache.set(key, self._compress(value), expiration) @staticmethod", "..., ) -> None: super().__init__() self._cache = cache if compress", "cache: Cache, compress: Optional[Callable[[str], AnyStr]] = ..., decompress: Optional[Callable[[AnyStr], str]]", "get(self, key): value = self._cache.get(key) return value if value is", "else: self._decompress = decompress def get(self, key): value = self._cache.get(key)", "decompress def get(self, key): value = self._cache.get(key) return value if", "value is NOT_FOUND else self._decompress(value) def set(self, key, value, expiration:", "self._default_decompress else: self._decompress = decompress def get(self, key): value =", "self._cache.get(key) return value if value is NOT_FOUND else self._decompress(value) def", "None: 
self._decompress = self._noop elif decompress is ...: self._decompress =", "if value is NOT_FOUND else self._decompress(value) def set(self, key, value,", "default_compress, decompress as default_decompress from .cache import Cache from ..constants", "self, cache: Cache, compress: Optional[Callable[[str], AnyStr]] = ..., decompress: Optional[Callable[[AnyStr],", "if compress is None: self._compress = self._noop elif compress is", "Callable, AnyStr, Optional from zlib import compress as default_compress, decompress", "value, expiration: int) -> None: self._cache.set(key, self._compress(value), expiration) @staticmethod def", "bytes: return default_compress(obj.encode(\"UTF-8\")) @staticmethod def _default_decompress(data: bytes) -> str: return", "import compress as default_compress, decompress as default_decompress from .cache import", "self._noop elif compress is ...: self._compress = self._default_compress else: self._compress", "elif compress is ...: self._compress = self._default_compress else: self._compress =", "if decompress is None: self._decompress = self._noop elif decompress is", "@staticmethod def _default_compress(obj: str) -> bytes: return default_compress(obj.encode(\"UTF-8\")) @staticmethod def", "decompress as default_decompress from .cache import Cache from ..constants import", "= self._default_decompress else: self._decompress = decompress def get(self, key): value", "_default_compress(obj: str) -> bytes: return default_compress(obj.encode(\"UTF-8\")) @staticmethod def _default_decompress(data: bytes)", "import Cache from ..constants import NOT_FOUND class CacheCompressionDecorator(Cache): def __init__(", "def get(self, key): value = self._cache.get(key) return value if value", "int) -> None: self._cache.set(key, self._compress(value), expiration) @staticmethod def _noop(x): return", "default_decompress from .cache import Cache from ..constants import NOT_FOUND class", "-> bytes: return default_compress(obj.encode(\"UTF-8\")) @staticmethod def 
_default_decompress(data: bytes) -> str:", "Cache from ..constants import NOT_FOUND class CacheCompressionDecorator(Cache): def __init__( self,", "Optional from zlib import compress as default_compress, decompress as default_decompress", "as default_decompress from .cache import Cache from ..constants import NOT_FOUND", "super().__init__() self._cache = cache if compress is None: self._compress =", "None: super().__init__() self._cache = cache if compress is None: self._compress", "self._decompress = decompress def get(self, key): value = self._cache.get(key) return", "is ...: self._compress = self._default_compress else: self._compress = compress if", "self._decompress = self._default_decompress else: self._decompress = decompress def get(self, key):", "self._compress = self._noop elif compress is ...: self._compress = self._default_compress", "-> None: super().__init__() self._cache = cache if compress is None:", "None: self._compress = self._noop elif compress is ...: self._compress =", "typing import Callable, AnyStr, Optional from zlib import compress as", "str) -> bytes: return default_compress(obj.encode(\"UTF-8\")) @staticmethod def _default_decompress(data: bytes) ->", "class CacheCompressionDecorator(Cache): def __init__( self, cache: Cache, compress: Optional[Callable[[str], AnyStr]]", "key): value = self._cache.get(key) return value if value is NOT_FOUND", "self._decompress(value) def set(self, key, value, expiration: int) -> None: self._cache.set(key,", ") -> None: super().__init__() self._cache = cache if compress is", "= cache if compress is None: self._compress = self._noop elif", "value = self._cache.get(key) return value if value is NOT_FOUND else", "NOT_FOUND else self._decompress(value) def set(self, key, value, expiration: int) ->", "self._cache = cache if compress is None: self._compress = self._noop", "def set(self, key, value, expiration: int) -> None: self._cache.set(key, self._compress(value),", "compress if decompress is None: 
self._decompress = self._noop elif decompress", "-> None: self._cache.set(key, self._compress(value), expiration) @staticmethod def _noop(x): return x", "self._compress(value), expiration) @staticmethod def _noop(x): return x @staticmethod def _default_compress(obj:", "NOT_FOUND class CacheCompressionDecorator(Cache): def __init__( self, cache: Cache, compress: Optional[Callable[[str],", "expiration) @staticmethod def _noop(x): return x @staticmethod def _default_compress(obj: str)", "is None: self._compress = self._noop elif compress is ...: self._compress", "AnyStr, Optional from zlib import compress as default_compress, decompress as", "cache if compress is None: self._compress = self._noop elif compress", "self._decompress = self._noop elif decompress is ...: self._decompress = self._default_decompress", "None: self._cache.set(key, self._compress(value), expiration) @staticmethod def _noop(x): return x @staticmethod", "self._default_compress else: self._compress = compress if decompress is None: self._decompress", ".cache import Cache from ..constants import NOT_FOUND class CacheCompressionDecorator(Cache): def", "AnyStr]] = ..., decompress: Optional[Callable[[AnyStr], str]] = ..., ) ->" ]
[ "self, mob: Mobject, dt: float = 1, substeps: int =", "position. The values of this functions is displayed by moving", "[self.x_range, self.y_range] if three_dimensions or z_range: self.z_range = z_range or", "else: self.ranges += [[0, 0]] for i in range(len(self.ranges)): if", "60) circle.add_updater(vector_field.get_nudge_updater(pointwise=True)) dot.add_updater(vector_field.get_nudge_updater()) self.add(circle, dot) self.wait(6) \"\"\" def runge_kutta(self, p:", "point The root point of the vector. kwargs : Any", "A sequence of y_min, y_max, delta_y z_range A sequence of", "max_anchors_per_line=5, virtual_time=1, color=BLUE ) self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5, time_width=0.5) self.wait(1) self.play(stream_lines.end_animation())", "each starting point. dt The factor by which the distance", "manim:: StreamLineCreation class StreamLineCreation(Scene): def construct(self): func = lambda pos:", "line.time += dt * flow_speed if line.time >= self.virtual_time: line.time", "self.submob_movement_updater = None @staticmethod def shift_func( func: Callable[[np.ndarray], np.ndarray], shift_vector:", "2, fw / 2, pw) y_array = np.linspace(fh / 2,", "* LEFT vf = ArrowVectorField(func, x_range=[-7, 7, 1]) self.add(vf) self.wait()", "vector field and showing their trace. Parameters ---------- func The", "in fully displayed stream lines without a noticeable cut. Returns", "is used as run time. Returns ------- :class:`~.AnimationGroup` The creation", "pw, 3)) x_array = np.linspace(-fw / 2, fw / 2,", "agents along the vector field and showing their trace. Parameters", "on the center of the given :class:`~.Mobject`. 
If `True` the", "x_range=[-7, 7, 1], y_range=[-4, 4, 1], stroke_width=3, virtual_time=1, # use", "LEFT stream_lines = StreamLines( func, stroke_width=3, max_anchors_per_line=5, virtual_time=1, color=BLUE )", "x, y, z in it.product(x_range, y_range, z_range): self.add(self.get_vector(x * RIGHT", "------ ValueError if no stream line animation is running Examples", "Succession from ..animation.creation import Create from ..animation.indication import ShowPassingFlash from", "for c in colors]) def func(values, opacity=1): alphas = inverse_interpolate(start,", "---------- func The function defining the rate of change at", "/ 2` if not defined. n_repeats The number of agents", "may take a long time to compute. Returns ------- Image.Imgae", "inter_alphas.repeat(3).reshape((len(indices), 3)) result = interpolate(rgbs[indices], rgbs[next_indices], inter_alphas) result = np.concatenate(", "result in a better approximation of the trajectories in the", "- self.x_range[2] or p[1] < self.y_range[0] - self.padding or p[1]", "being used Examples -------- .. manim:: ContinuousMotion class ContinuousMotion(Scene): def", "---------- speed The speed at which to move the submobjects.", "The speed at which to move the submobjects. See :meth:`get_nudge_updater`", "details. 
pointwise Whether to move the mobject along the vector", "scalar that is used to determine how much a point", "Determining stream line starting positions: x_range: Sequence[float] = None, y_range:", "Callable[[float], float] = linear, line_animation_class: Type[ShowPassingFlash] = ShowPassingFlash, **kwargs )", "def shift_func( func: Callable[[np.ndarray], np.ndarray], shift_vector: np.ndarray, ) -> Callable[[np.ndarray],", "= VMobject() line.duration = step * dt step = max(1,", "The stream lines will continuously flow Parameters ---------- warm_up :", "= None, n_repeats=1, # Determining how lines are drawn dt=0.05,", "- half_noise) * UP + (z - half_noise) * OUT", "config[\"frame_height\"] points_array = np.zeros((ph, pw, 3)) x_array = np.linspace(-fw /", "from math import ceil, floor from typing import Callable, Iterable,", "self.play(stream_lines.end_animation()) \"\"\" if self.flow_animation is None: raise ValueError(\"You have to", "= np.zeros((ph, pw, 3)) x_array = np.linspace(-fw / 2, fw", "p[1] < self.y_range[0] - self.padding or p[1] > self.y_range[1] +", "ValueError(\"You have to start the animation before fading it out.\")", "None, y_range: Sequence[float] = None, z_range: Sequence[float] = None, three_dimensions:", "np.ndarray]` The scaled vector field function. \"\"\" return lambda p:", "color=colors[-1]).shift(LEFT * 5) vf = ArrowVectorField( func, min_color_scheme_value=2, max_color_scheme_value=10, colors=colors", "class does by default not include any visible elements but", "self.single_color = False if color_scheme is None: def color_scheme(p): return", "length_func = lambda x: x / 3 vf2 = ArrowVectorField(func,", "The creation animation of the stream lines. Examples -------- ..", "optional If `True` the animation is initialized line by line.", "vector field. Returns ------- `Callable[[np.ndarray], np.ndarray]` The shifted vector field", "at every position of the vector field. 
color The color", ".types.opengl_vectorized_mobject import OpenGLVMobject DEFAULT_SCALAR_FIELD_COLORS: list = [BLUE_E, GREEN, YELLOW, RED]", "Returns ------- VectorField This vector field. \"\"\" self.remove_updater(self.submob_movement_updater) self.submob_movement_updater =", "max_anchors_per_line=5, # better performance with fewer anchors ) self.play(stream_lines.create()) #", "the speed of such a mobject. pointwise Whether to move", "max_steps if not step: continue if config[\"renderer\"] == \"opengl\": line", "inverse interpolation at :func:`~.inverse_interpolate` colors list of colors to generate", "shorter lines max_anchors_per_line=5, # better performance with fewer anchors )", "= ease_out_sine creation_staring_speed = creation_rate_func(0.001) * 1000 creation_run_time = (", "mobject moves per second is equal to the magnitude of", "Succession( UpdateFromAlphaFunc( line, hide_and_wait, run_time=-line.time / self.flow_speed, ), create, ),", "line.set_stroke( [self.pos_to_color(p) for p in line.get_anchors()], ) else: line.color_using_background_image(self.background_img) line.set_stroke(width=self.stroke_width,", "the mobject will move along the vector field, where its", "The mobject to move along the vector field dt A", "field. Parameters ---------- point The root point of the vector.", "], ) def outside_box(p): return ( p[0] < self.x_range[0] -", "config[\"frame_rate\"] animations = [] self.remove_updater(self.flow_animation) self.flow_animation = None for line", "\"\"\"Stops the continuous movement started using :meth:`start_submobject_movement`. Returns ------- VectorField", "values also result in the last color of the gradient.", "is None: def color_scheme(p): return np.linalg.norm(p) self.color_scheme = color_scheme #", "move the mobject along the vector field. 
If `False` the", "virtual_time=1, color=BLUE ) self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5, time_width=0.5) self.wait(1) self.play(stream_lines.end_animation()) \"\"\"", "import OpenGLVMobject DEFAULT_SCALAR_FIELD_COLORS: list = [BLUE_E, GREEN, YELLOW, RED] class", "Callable[[np.ndarray], np.ndarray], shift_vector: np.ndarray, ) -> Callable[[np.ndarray], np.ndarray]: \"\"\"Shift a", "distance agents can move out of the generation area before", ":class:`~.Mobject` s along the vector field. Parameters ---------- func The", "GREEN, YELLOW, RED] class VectorField(VGroup): \"\"\"A vector field. Vector fields", "mob.nudge_submobjects( dt * speed, pointwise=pointwise, ) self.add_updater(self.submob_movement_updater) return self def", "TODO: Variant of StreamLines that is able to respond to", "Parameters ---------- func The function defining the rate of change", "from ..mobject.geometry import Vector from ..mobject.mobject import Mobject from ..mobject.types.vectorized_mobject", "self.get_vectorized_rgba_gradient_function( min_color_scheme_value, max_color_scheme_value, colors, ) for point in start_points: points", "in the vector field. Higher values therefore result in longer", "ArrowVectorField(VectorField): \"\"\"A :class:`VectorField` represented by a set of change vectors.", "moves per second is equal to the magnitude of the", "None, n_repeats=1, # Determining how lines are drawn dt=0.05, virtual_time=3,", "1, substeps: int = 1, pointwise: bool = False, )", "details. Returns ------- VectorField This vector field. 
\"\"\" for mob", "2 ) self.n_repeats = n_repeats self.virtual_time = virtual_time self.max_anchors_per_line =", "speed: float = 1, pointwise: bool = False, ) ->", "opacity), name=\"stroke_rgba\", ) else: if np.any(self.z_range != np.array([0, 0.5, 0.5])):", "stroke_width=3, virtual_time=1, # use shorter lines max_anchors_per_line=5, # better performance", "dt: self.nudge(mob, dt * speed, pointwise=pointwise) def start_submobject_movement( self, speed:", "\"\"\"Animates the stream lines using an updater. The stream lines", "+ np.cos(pos[1] / 2) * LEFT stream_lines = StreamLines(func, stroke_width=3,", "value used for inverse interpolation at :func:`~.inverse_interpolate` colors list of", "field. Parameters ---------- speed At `speed=1` the distance a mobject", "5) max_radius = Circle(radius=10, color=colors[-1]).shift(LEFT * 5) vf = ArrowVectorField(", "a :class:`~.Mobject` along the vector field. Parameters ---------- mob The", "stream_lines = StreamLines(func, stroke_width=3, max_anchors_per_line=30) self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5) self.wait(stream_lines.virtual_time /", "due to the `lag_ratio`. If undefined, the virtual time of", "Mobject, dt: float = 1, substeps: int = 1, pointwise:", "lag_ratio The lag ratio of the animation. If undefined, it", "get included in the image. Lower values give more accurate", "to be mapped to the first color in `colors`. Lower", "-1 self.add(line.anim.mobject) def updater(mob, dt): for line in mob.stream_lines: line.time", "by a set of change vectors. Vector fields are always", "the color gradient of the vector field. 
kwargs : Any", "1 return interpolate(c1, c2, alpha) self.pos_to_rgb = pos_to_rgb self.pos_to_color =", "/ stream_lines.flow_speed) \"\"\" for line in self.stream_lines: run_time = line.duration", "self.virtual_time = virtual_time self.max_anchors_per_line = max_anchors_per_line self.padding = padding self.stroke_width", "2), ] self.y_range = y_range or [ floor(-config[\"frame_height\"] / 2),", "along the vector field dt A scalar to the amount", "x_array = np.linspace(-fw / 2, fw / 2, pw) y_array", ":save_last_frame: class SpawningAndFlowingArea(Scene): def construct(self): func = lambda pos: np.sin(pos[0])", "line.anim.begin() line.time = random.random() * self.virtual_time if warm_up: line.time *=", "= np.array(self.func(point)) norm = np.linalg.norm(output) if norm != 0: output", "nudge is divided into. Higher values give more accurate approximations.", "random from math import ceil, floor from typing import Callable,", "in line.points], ) line.set_rgba_array_direct( self.values_to_rgbas(norms, opacity), name=\"stroke_rgba\", ) else: if", "time of every single stream line creation. The runtime of", "import BLUE_E, GREEN, RED, YELLOW, color_to_rgb, rgb_to_color from ..utils.deprecation import", "is used to cap the displayed size of vectors to", "/ line.anim.run_time, 0, 1)) self.add_updater(updater) self.flow_animation = updater self.flow_speed =", "fw / 2, pw) y_array = np.linspace(fh / 2, -fh", "a single value. This value gives the position in the", "`True` the animation is initialized line by line. Otherwise it", "stream_lines.flow_speed) \"\"\" for line in self.stream_lines: run_time = line.duration /", "vect class StreamLines(VectorField): \"\"\"StreamLines represent the flow of a :class:`VectorField`", "function defining a vector at every position. This class does", "to be passed to the :class:`~.Vector` constructor \"\"\" output =", "Parameters ---------- point The root point of the vector. 
kwargs", "with :meth:`~.Mobject.add_updater`, the mobject will move along the vector field,", "step_size = dt / substeps for _ in range(substeps): if", "TODO why not y_array = y_array.repeat(...)? points_array[:, :, 0] =", "step. Returns ------- float How much the point is shifted.", "which the distance an agent moves per step is stretched.", "= x_range or [ floor(-config[\"frame_width\"] / 2), ceil(config[\"frame_width\"] / 2),", "= lambda pos: (pos[0] * UR + pos[1] * LEFT)", "2) * RIGHT + np.cos(pos[0] / 2) * UP vector_field", "if not self.single_color: self.background_img = self.get_colored_background_image() if config[\"renderer\"] == \"opengl\":", "range(max_steps): last_point = points[-1] new_point = last_point + dt *", "/ self.flow_speed, ), create, ), ) self.remove(line.anim.mobject) line.anim.finish() else: remaining_time", "display size for the vector. By default this is used", "using an updater. The stream lines will continuously flow Parameters", "animation smoothly. Returns an animation resulting in fully displayed stream", "vector field. Higher values therefore result in longer stream lines.", "it will be selected so that the total animation length", "warm_up: line.time *= -1 self.add(line.anim.mobject) def updater(mob, dt): for line", "See :meth:`get_nudge_updater` for details. pointwise Whether to move the mobject", "y_array = y_array.repeat(...)? points_array[:, :, 0] = x_array points_array[:, :,", "delta_z three_dimensions Enables three_dimensions. Default set to False, automatically turns", "1)) self.add_updater(updater) self.flow_animation = updater self.flow_speed = flow_speed self.time_width =", "= 5) -> Image.Image: \"\"\"Generate an image that displays the", "+ 1 if not self.single_color: self.background_img = self.get_colored_background_image() if config[\"renderer\"]", "np.sin(pos[0] / 2) * UR + np.cos(pos[1] / 2) *", "+ np.cos(pos[0] / 2) * UP vector_field = ArrowVectorField( func,", "starts with all lines shown. 
flow_speed At `flow_speed=1` the distance", "1000 creation_run_time = ( max_run_time / (1 + self.time_width) *", "= [] self.remove_updater(self.flow_animation) self.flow_animation = None for line in self.stream_lines:", "undefined, it will be selected so that the total animation", "animation of the stream lines. The stream lines appear in", "the run time of each stream line creation. run_time The", "BasicUsage(Scene): def construct(self): func = lambda pos: ((pos[0] * UR", "self.n_repeats = n_repeats self.virtual_time = virtual_time self.max_anchors_per_line = max_anchors_per_line self.padding", "UR + np.cos(pos[1] / 2) * LEFT stream_lines = StreamLines(func,", "Mobject from ..mobject.types.vectorized_mobject import VGroup, VMobject from ..utils.bezier import interpolate,", "Circle(radius=2).shift(LEFT) self.add(circle.copy().set_color(GRAY)) dot = Dot().move_to(circle) vector_field.nudge(circle, -2, 60, True) vector_field.nudge(dot,", "x / 3 vf2 = ArrowVectorField(func, x_range=[-7, 7, 1], length_func=length_func)", "* UP + z * OUT)) self.set_opacity(self.opacity) def get_vector(self, point:", "using the color gradient. Parameters ---------- sampling_rate The stepsize at", "visible elements but provides methods to move other :class:`~.Mobject` s", "self.y_range[0] - self.padding or p[1] > self.y_range[1] + self.padding -", "specifications of this vector field. Parameters ---------- point The root", "* LEFT stream_lines = StreamLines(func, stroke_width=3, max_anchors_per_line=30) self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5)", "float, float, float]: vec = self.func(pos) color_value = np.clip( self.color_scheme(vec),", "/ 2) * RIGHT + np.cos(pos[0] / 2) * UP", "vect.set_color(self.pos_to_color(point)) return vect class StreamLines(VectorField): \"\"\"StreamLines represent the flow of", "of such a mobject. 
pointwise Whether to move the mobject", "\"\"\"Get an update function to move a :class:`~.Mobject` along the", "able to respond to changes in the vector field function", "Returns ------- :class:`~.AnimationGroup` The animation fading out the running stream", "+ np.cos(pos[1] / 2) * LEFT vf = ArrowVectorField(func, x_range=[-7,", "Sequence[float], step_size: float) -> float: \"\"\"Returns the change in position", "not step: continue if config[\"renderer\"] == \"opengl\": line = OpenGLVMobject()", "the flow moves per second is equal to the magnitude", "animation fading out the running stream animation. Raises ------ ValueError", ") self.add(vf, min_radius, max_radius) \"\"\" def __init__( self, func: Callable[[np.ndarray],", "field and showing their trace. Parameters ---------- func The function", "[ \"VectorField\", \"ArrowVectorField\", \"StreamLines\", ] import itertools as it import", "A scalar to the amount the mobject is moved along", "will be selected so that the total animation length is", "(k_1 * 0.5)) k_3 = self.func(p + step_size * (k_2", "the vector field. Parameters ---------- mob The mobject to move", "the given :class:`~.Mobject`. If `True` the vector field takes effect", "the last color of the gradient. colors The colors defining", "min_radius = Circle(radius=2, color=colors[0]).shift(LEFT * 5) max_radius = Circle(radius=10, color=colors[-1]).shift(LEFT", "agent is altered along each axis. Defaults to :code:`delta_y /", "of the gradient. 
colors The colors defining the color gradient", "each position is calculated by passing the positing through a", "in self.stream_lines ] random.shuffle(animations) return AnimationGroup(*animations, lag_ratio=lag_ratio) def start_animation( self,", "4, 1], stroke_width=3, virtual_time=1, # use shorter lines max_anchors_per_line=5, #", "color_scheme, min_color_scheme_value, max_color_scheme_value, colors, **kwargs, ) self.noise_factor = ( noise_factor", ":, 0] = x_array points_array[:, :, 1] = y_array rgbs", "mobject along the vector field. If `False` the vector field", "y_max, delta_y z_range A sequence of z_min, z_max, delta_z three_dimensions", "def func(values, opacity=1): alphas = inverse_interpolate(start, end, np.array(values)) alphas =", "0.2], padding=1 ) spawning_area = Rectangle(width=6, height=4) flowing_area = Rectangle(width=8,", "constructor \"\"\" output = np.array(self.func(point)) norm = np.linalg.norm(output) if norm", "np.arange(*self.x_range) for y in np.arange(*self.y_range) for z in np.arange(*self.z_range) ],", "the total animation length is 1.5 times the run time", "The values of this functions is displayed by moving many", "None else self.y_range[2] / 2 ) self.n_repeats = n_repeats self.virtual_time", "= None, three_dimensions: bool = False, noise_factor: Optional[float] = None,", "UR + pos[1] * LEFT) - pos stream_lines = StreamLines(", "-> Callable[[np.ndarray], np.ndarray]: \"\"\"Shift a vector field function. Parameters ----------", "fields are based on a function defining a vector at", "always based on a function defining the :class:`~.Vector` at every", "values give more accurate results, but may take a long", "the vector at every position. The values of this functions", "Higher values therefore result in longer stream lines. 
However, this", "self, start: float, end: float, colors: Iterable, ): \"\"\" Generates", "+ self.padding - self.x_range[2] or p[1] < self.y_range[0] - self.padding", "-> Image.Image: \"\"\"Generate an image that displays the vector field.", "+ np.cos(pos[1]) * LEFT + pos / 5 stream_lines =", "The factor by which the distance an agent moves per", "in the vector field. virtual_time The time the agents get", "for line in self.stream_lines: create = Create( line, run_time=creation_run_time, rate_func=creation_rate_func,", ".. manim:: BasicUsage :save_last_frame: class BasicUsage(Scene): def construct(self): func =", "root point of the vector. kwargs : Any Additional arguments", "* RIGHT + np.cos(pos[0] / 2) * UP vector_field =", "y_array rgbs = np.apply_along_axis(self.pos_to_rgb, 2, points_array) return Image.fromarray((rgbs * 255).astype(\"uint8\"))", "1, pointwise: bool = False, ) -> \"VectorField\": \"\"\"Nudge a", "BLUE, DARK_GRAY] min_radius = Circle(radius=2, color=colors[0]).shift(LEFT * 5) max_radius =", "[*self.submobjects] def create( self, lag_ratio: Optional[float] = None, run_time: Optional[Callable[[float],", "def color_scheme(p): return np.linalg.norm(p) self.color_scheme = color_scheme # TODO maybe", "..mobject.mobject import Mobject from ..mobject.types.vectorized_mobject import VGroup, VMobject from ..utils.bezier", "0]] for i in range(len(self.ranges)): if len(self.ranges[i]) == 2: self.ranges[i]", "def get_vector(self, point: np.ndarray): \"\"\"Creates a vector in the vector", "out displayed norm length_func: Callable[[float], float] = lambda norm: 0.45", "return self def nudge_submobjects( self, dt: float = 1, substeps:", "ratio of the animation. If undefined, it will be selected", "a point along a vector field. Parameters ---------- p The", "position is calculated by passing the positing through a series", "fading out the running stream animation. 
Raises ------ ValueError if", "padding self.stroke_width = stroke_width half_noise = self.noise_factor / 2 np.random.seed(0)", "np.array([0, 0.5, 0.5])): line.set_stroke( [self.pos_to_color(p) for p in line.get_anchors()], )", "VMobject from ..utils.bezier import interpolate, inverse_interpolate from ..utils.color import BLUE_E,", "the color gradient defined using `min_color_scheme_value`, `max_color_scheme_value` and `colors`. min_color_scheme_value", "1 c1 = self.rgbs[int(alpha)] c2 = self.rgbs[min(int(alpha + 1), len(self.rgbs)", "Rectangle(width=8, height=6) labels = [Tex(\"Spawning Area\"), Tex(\"Flowing Area\").shift(DOWN * 2.5)]", "colors = [RED, YELLOW, BLUE, DARK_GRAY] min_radius = Circle(radius=2, color=colors[0]).shift(LEFT", "time. Returns ------- :class:`~.AnimationGroup` The creation animation of the stream", "float = 1, time_width: float = 0.3, rate_func: Callable[[float], float]", "defining the rate of change at every position of the", "colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, **kwargs ): super().__init__(**kwargs) self.func = func", "/ self.flow_speed creation_rate_func = ease_out_sine creation_staring_speed = creation_rate_func(0.001) * 1000", "colors, ) for point in start_points: points = [point] for", "is moved along the vector field. The actual distance is", "def construct(self): func = lambda pos: pos - LEFT *", "step_size)) return self def nudge_submobjects( self, dt: float = 1,", "max_radius) \"\"\" def __init__( self, func: Callable[[np.ndarray], np.ndarray], color: Optional[Color]", "Sequence[float] = None, y_range: Sequence[float] = None, z_range: Sequence[float] =", "1, pointwise: bool = False, ) -> \"VectorField\": \"\"\"Apply a", "value using the color gradient. Parameters ---------- sampling_rate The stepsize", "a long time to compute. Returns ------- Image.Imgae The vector", "\"\"\"Scale a vector field function. 
Parameters ---------- func The function", "2, colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, **kwargs ): super().__init__(**kwargs) self.func =", "line.anim.interpolate(min(line.time / line.anim.run_time, 1)) if alpha == 1: self.remove(line.anim.mobject) line.anim.finish()", "mobject to move along the vector field dt A scalar", "is None: raise ValueError(\"You have to start the animation before", "defining the vector at every position. The values of this", "be applied to the vector field. Returns ------- `Callable[[np.ndarray], np.ndarray]`", "super().__init__( func, color, color_scheme, min_color_scheme_value, max_color_scheme_value, colors, **kwargs, ) self.noise_factor", "+ dt * func(last_point) if outside_box(new_point): break points.append(new_point) step =", "next_indices = np.clip(indices + 1, 0, len(rgbs) - 1) inter_alphas", "the vector field to all submobjects. Parameters ---------- dt A", "This vector field. \"\"\" for mob in self.submobjects: self.nudge(mob, dt,", "set of change vectors. Vector fields are always based on", "or p[1] > self.y_range[1] + self.padding - self.y_range[2] or p[2]", "= creation_rate_func(0.001) * 1000 creation_run_time = ( max_run_time / (1", "x: x / 2 ) self.add(vector_field) circle = Circle(radius=2).shift(LEFT) self.add(circle.copy().set_color(GRAY))", "run time of every single stream line creation. The runtime", "0: animations.append( Succession( UpdateFromAlphaFunc( line, hide_and_wait, run_time=-line.time / self.flow_speed, ),", "0] = x_array points_array[:, :, 1] = y_array rgbs =", ": Any Additional arguments to be passed to the :class:`~.Vector`", "a single color.\", ) ph = int(config[\"pixel_height\"] / sampling_rate) pw", "by which the distance an agent moves per step is", "self.ranges = [self.x_range, self.y_range] if three_dimensions or z_range: self.z_range =", "the vector field. If set, position-specific coloring is disabled. color_scheme", "automatically turns True if z_range is not None. 
noise_factor The", "n_repeats=1, # Determining how lines are drawn dt=0.05, virtual_time=3, max_anchors_per_line=100,", "if line.time >= self.virtual_time: line.time -= self.virtual_time line.anim.interpolate(np.clip(line.time / line.anim.run_time,", "lines. Examples -------- .. manim:: StreamLineCreation class StreamLineCreation(Scene): def construct(self):", "create = Create( line, run_time=creation_run_time, rate_func=creation_rate_func, ) if line.time <=", "vector field. Calling this method multiple times will result in", "flow Parameters ---------- warm_up : bool, optional If `True` the", "methods to move other :class:`~.Mobject` s along the vector field.", "value using `self.color_scheme` and finally generate a color from that", "= np.array( [np.linalg.norm(self.func(point)) for point in line.points], ) line.set_rgba_array_direct( self.values_to_rgbas(norms,", "in range(substeps): if pointwise: mob.apply_function(lambda p: p + runge_kutta(self, p,", "min_color_scheme_value, max_color_scheme_value, ) alpha = inverse_interpolate( min_color_scheme_value, max_color_scheme_value, color_value, )", "generating an image if the vector field uses a single", "same speed # as the regular line flash animation but", "the distance an agent moves per step is stretched. Lower", "* LEFT stream_lines = StreamLines( func, stroke_width=3, max_anchors_per_line=5, virtual_time=1, color=BLUE", "order. Parameters ---------- lag_ratio The lag ratio of the animation.", "alpha = inverse_interpolate( min_color_scheme_value, max_color_scheme_value, color_value, ) alpha *= len(self.rgbs)", "float, end: float, colors: Iterable, ): \"\"\" Generates a gradient", "line animation smoothly. 
Returns an animation resulting in fully displayed", "== 1: mob.set_stroke(opacity=1) def finish_updater_cycle(line, alpha): line.time += dt *", "import ShowPassingFlash from ..animation.update import UpdateFromAlphaFunc from ..constants import OUT,", "rgb_to_color from ..utils.deprecation import deprecated_params from ..utils.rate_functions import ease_out_sine, linear", "a vector at every position. This class does by default", "numpy array Parameters ---------- start start value used for inverse", "interpolate(rgbs[indices], rgbs[next_indices], inter_alphas) result = np.concatenate( (result, np.full([len(result), 1], opacity)),", "lines using an updater. The stream lines will continuously flow", "line flash animation but eases out. dt = 1 /", "that is used to determine how much a point is", "stream lines. However, this whole time gets simulated upon creation.", "is 1.5 times the run time of each stream line", "sampling_rate The stepsize at which pixels get included in the", "2` if not defined. n_repeats The number of agents generated", "be applied to the vector field. Examples -------- .. manim::", "axis=1, ) return result return func class ArrowVectorField(VectorField): \"\"\"A :class:`VectorField`", "cap the displayed size of vectors to reduce the clutter.", "in np.arange(*self.x_range) for y in np.arange(*self.y_range) for z in np.arange(*self.z_range)", "each stream line creation. 
run_time The run time of every", "= None return self def get_colored_background_image(self, sampling_rate: int = 5)", "the vector is passed, the returned value will be used", "def finish_updater_cycle(line, alpha): line.time += dt * self.flow_speed line.anim.interpolate(min(line.time /", "creation_rate_func = ease_out_sine creation_staring_speed = creation_rate_func(0.001) * 1000 creation_run_time =", "5) vf = ArrowVectorField( func, min_color_scheme_value=2, max_color_scheme_value=10, colors=colors ) self.add(vf,", "= line.duration / flow_speed line.anim = line_animation_class( line, run_time=run_time, rate_func=rate_func,", "EndAnimation(Scene): def construct(self): func = lambda pos: np.sin(pos[0] / 2)", "line.anim.run_time, 1)) if alpha == 1: self.remove(line.anim.mobject) line.anim.finish() max_run_time =", "p[2] < self.z_range[0] - self.padding or p[2] > self.z_range[1] +", "times the run time of each stream line creation. run_time", "run_time=creation_run_time, rate_func=creation_rate_func, ) if line.time <= 0: animations.append( Succession( UpdateFromAlphaFunc(", "that is able to respond to changes in the vector", "from typing import Callable, Iterable, Optional, Sequence, Tuple, Type import", "self.single_color: raise ValueError( \"There is no point in generating an", "None: self.single_color = False if color_scheme is None: def color_scheme(p):", "vectors. The actual size of the vector is passed, the", "to be passed to the :class:`~.Vector` constructor kwargs : Any", "0.5, 0.5])): line.set_stroke( [self.pos_to_color(p) for p in line.get_anchors()], ) else:", "At `flow_speed=1` the distance the flow moves per second is", "running stream animation. Raises ------ ValueError if no stream line", "float] = lambda norm: 0.45 * sigmoid(norm), opacity: float =", "-------- .. 
manim:: ScaleVectorFieldFunction class ScaleVectorFieldFunction(Scene): def construct(self): func =", "x in np.arange(*self.x_range) for y in np.arange(*self.y_range) for z in", "determined by the magnitude of the vector field. Parameters ----------", "points_array) return Image.fromarray((rgbs * 255).astype(\"uint8\")) def get_vectorized_rgba_gradient_function( self, start: float,", "= None, color_scheme: Optional[Callable[[np.ndarray], float]] = None, min_color_scheme_value: float =", "if lag_ratio is None: lag_ratio = run_time / 2 /", "maybe other default for direction? self.rgbs = np.array(list(map(color_to_rgb, colors))) def", "\"ArrowVectorField\", \"StreamLines\", ] import itertools as it import random from", "mobject will move along the vector field, where its speed", "gradient. Parameters ---------- sampling_rate The stepsize at which pixels get", "UpdateFromAlphaFunc( line, hide_and_wait, run_time=-line.time / self.flow_speed, ), create, ), )", "import UpdateFromAlphaFunc from ..constants import OUT, RIGHT, UP from ..mobject.geometry", "creation. max_anchors_per_line The maximum number of anchors per line. Lines", "can move out of the generation area before being terminated.", "size for the vector. By default this is used to", "self.x_range, self.y_range, self.z_range = self.ranges super().__init__( func, color, color_scheme, min_color_scheme_value,", "will move along the vector field, where its speed is", "def pos_to_rgb(pos: np.ndarray) -> Tuple[float, float, float, float]: vec =", "= 0.3, rate_func: Callable[[float], float] = linear, line_animation_class: Type[ShowPassingFlash] =", "at that position, map that vector to a single value", "= scaled_alphas.astype(int) next_indices = np.clip(indices + 1, 0, len(rgbs) -", "stream lines. The stream lines appear in random order. 
Parameters", "how much a point is shifted in a single step.", "points_array = np.zeros((ph, pw, 3)) x_array = np.linspace(-fw / 2,", "config[\"renderer\"] == \"opengl\": # scaled for compatibility with cairo line.set_stroke(width=self.stroke_width", "self.add(circle.copy().set_color(GRAY)) dot = Dot().move_to(circle) vector_field.nudge(circle, -2, 60, True) vector_field.nudge(dot, -2,", "(len(rgbs) - 1) indices = scaled_alphas.astype(int) next_indices = np.clip(indices +", "result = interpolate(rgbs[indices], rgbs[next_indices], inter_alphas) result = np.concatenate( (result, np.full([len(result),", "**kwargs ) -> None: \"\"\"Animates the stream lines using an", "self.padding or p[0] > self.x_range[1] + self.padding - self.x_range[2] or", "used as run time. Returns ------- :class:`~.AnimationGroup` The creation animation", "self.y_range, self.z_range = self.ranges super().__init__( func, color, color_scheme, min_color_scheme_value, max_color_scheme_value,", "compute. Returns ------- Image.Imgae The vector field image. \"\"\" if", "= Create( line, run_time=creation_run_time, rate_func=creation_rate_func, ) if line.time <= 0:", "vector field. kwargs : Any Additional arguments to be passed", "is None: self.single_color = False if color_scheme is None: def", "submobjects along the vector field. Calling this method multiple times", "UR + pos[1] * LEFT) - pos) / 3 self.add(ArrowVectorField(func))", "= y_range or [ floor(-config[\"frame_height\"] / 2), ceil(config[\"frame_height\"] / 2),", "Callable[[np.ndarray], np.ndarray]: \"\"\"Scale a vector field function. Parameters ---------- func", "lines. However, this whole time gets simulated upon creation. max_anchors_per_line", "from ..utils.rate_functions import ease_out_sine, linear from ..utils.simple_functions import sigmoid from", "func(last_point) if outside_box(new_point): break points.append(new_point) step = max_steps if not", "using :meth:`start_submobject_movement`. Returns ------- VectorField This vector field. 
\"\"\" self.remove_updater(self.submob_movement_updater)", "self.play(vf.animate.become(vf2)) self.wait() .. manim:: Coloring :save_last_frame: class Coloring(Scene): def construct(self):", "axis. Defaults to :code:`delta_y / 2` if not defined. n_repeats", "the whole nudge is divided into. Higher values give more", "color self.submob_movement_updater = None @staticmethod def shift_func( func: Callable[[np.ndarray], np.ndarray],", "the displayed size of the vectors. The actual size of", "float] = linear, line_animation_class: Type[ShowPassingFlash] = ShowPassingFlash, **kwargs ) ->", "line.anim.run_time, 0, 1)) self.add_updater(updater) self.flow_animation = updater self.flow_speed = flow_speed", "UR + np.cos(pos[1]) * LEFT + pos / 5 stream_lines", "vectors. Vector fields are always based on a function defining", "p[2] > self.z_range[1] + self.padding - self.z_range[2] ) max_steps =", "= StreamLines( func, color=YELLOW, x_range=[-7, 7, 1], y_range=[-4, 4, 1],", "Create from ..animation.indication import ShowPassingFlash from ..animation.update import UpdateFromAlphaFunc from", "vector fields.\"\"\" __all__ = [ \"VectorField\", \"ArrowVectorField\", \"StreamLines\", ] import", "False, ) -> \"VectorField\": \"\"\"Nudge a :class:`~.Mobject` along the vector", "in labels: lbl.add_background_rectangle(opacity=0.6, buff=0.05) self.add(stream_lines, spawning_area, flowing_area, *labels) \"\"\" def", "noise_factor: Optional[float] = None, n_repeats=1, # Determining how lines are", "self.single_color: line.set_stroke(self.color) else: if config[\"renderer\"] == \"opengl\": # scaled for", "vector at every position. This class does by default not", "moving all submobjects along the vector field. Calling this method", "See :meth:`nudge` for details. Returns ------- VectorField This vector field.", "class Coloring(Scene): def construct(self): func = lambda pos: pos -", "np.ndarray): \"\"\"Creates a vector in the vector field. 
The created", "Nudging(Scene): def construct(self): func = lambda pos: np.sin(pos[1] / 2)", "field. \"\"\" self.remove_updater(self.submob_movement_updater) self.submob_movement_updater = None return self def get_colored_background_image(self,", "of the arrows. vector_config Additional arguments to be passed to", "The function defining a vector field. shift_vector The shift to", "the first color of the gradient. max_color_scheme_value The value of", "distance an agent moves per step is stretched. Lower values", "the generation area before being terminated. stroke_width The stroke with", "the stream lines using an updater. The stream lines will", "= 1, substeps: int = 1, pointwise: bool = False,", "-> \"VectorField\": \"\"\"Apply a nudge along the vector field to", "2) * LEFT stream_lines = StreamLines(func, stroke_width=3, max_anchors_per_line=30) self.add(stream_lines) stream_lines.start_animation(warm_up=False,", "/ 5 stream_lines = StreamLines( func, x_range=[-3, 3, 0.2], y_range=[-2,", "np.arange(*self.z_range) for x, y, z in it.product(x_range, y_range, z_range): self.add(self.get_vector(x", "distance the flow moves per second is equal to the", "@staticmethod def shift_func( func: Callable[[np.ndarray], np.ndarray], shift_vector: np.ndarray, ) ->", "path. The speed value scales the speed of this flow.", "single value. This value gives the position in the color", "vf = ArrowVectorField(func, x_range=[-7, 7, 1]) self.add(vf) self.wait() length_func =", "eases out. dt = 1 / config[\"frame_rate\"] animations = []", "any visible elements but provides methods to move other :class:`~.Mobject`", "defining the color gradient of the vector field. kwargs :", "a vector in the vector field. The created vector is", "runge_kutta(self, p: Sequence[float], step_size: float) -> float: \"\"\"Returns the change", "virtual_time as run_time self.wait() \"\"\" if run_time is None: run_time", "def nudge( self, mob: Mobject, dt: float = 1, substeps:", "vector to a single value. 
This value gives the position", "a mobject moves per second is equal to the magnitude", "else: remaining_time = max_run_time - line.time / self.flow_speed animations.append( Succession(", "the speed of this flow. time_width The proportion of the", "color_scheme(p): return np.linalg.norm(p) self.color_scheme = color_scheme # TODO maybe other", "np.linalg.norm(p) self.color_scheme = color_scheme # TODO maybe other default for", "] import itertools as it import random from math import", "for i in range(len(self.ranges)): if len(self.ranges[i]) == 2: self.ranges[i] +=", "= time_width def end_animation(self) -> AnimationGroup: \"\"\"End the stream line", "time_width=time_width, **kwargs, ) line.anim.begin() line.time = random.random() * self.virtual_time if", "total animation length is 1.5 times the run time of", "y * UP + z * OUT)) self.set_opacity(self.opacity) def get_vector(self,", "to the :class:`~.VGroup` constructor Examples -------- .. manim:: BasicUsage :save_last_frame:", "3 self.add(ArrowVectorField(func)) .. manim:: SizingAndSpacing class SizingAndSpacing(Scene): def construct(self): func", "1 inter_alphas = inter_alphas.repeat(3).reshape((len(indices), 3)) result = interpolate(rgbs[indices], rgbs[next_indices], inter_alphas)", "the regular line flash animation but eases out. dt =", "for inverse interpolation at :func:`~.inverse_interpolate` end end value used for", "The runtime of the whole animation might be longer due", "* LEFT) - pos stream_lines = StreamLines( func, color=YELLOW, x_range=[-7,", "noise_factor if noise_factor is not None else self.y_range[2] / 2", "the stream line animation smoothly. Returns an animation resulting in", "len(self.rgbs) - 1)] alpha %= 1 return interpolate(c1, c2, alpha)", "points_array[:, :, 1] = y_array rgbs = np.apply_along_axis(self.pos_to_rgb, 2, points_array)", "((pos[0] * UR + pos[1] * LEFT) - pos) /", "length. 
padding The distance agents can move out of the", "be selected so that the total animation length is 1.5", "the vector field. The created vector is based on the", "creation_run_time is calculated so that the creation animation starts at", "See :meth:`nudge` for details. Returns ------- Callable[[Mobject, float], Mobject] The", "is based on the magnitude of the vector field. substeps", "x_range A sequence of x_min, x_max, delta_x y_range A sequence", "\"\"\" if self.single_color: raise ValueError( \"There is no point in", "in actual norm, spits out displayed norm length_func: Callable[[float], float]", "such a mobject. pointwise Whether to move the mobject along", ":class:`~.Mobject`, potentially distorting it. Returns ------- VectorField This vector field.", "n_repeats The number of agents generated at each starting point.", "linear from ..utils.simple_functions import sigmoid from .types.opengl_vectorized_mobject import OpenGLVMobject DEFAULT_SCALAR_FIELD_COLORS:", "YELLOW, BLUE, DARK_GRAY] min_radius = Circle(radius=2, color=colors[0]).shift(LEFT * 5) max_radius", "the mobject is moved along the vector field. The actual", "anchors per line. Lines with more anchors get reduced in", "`True` the vector field takes effect on the points of", "True self.color = color self.submob_movement_updater = None @staticmethod def shift_func(", "be used however. Parameters ---------- func The function defining the", "or p[2] < self.z_range[0] - self.padding or p[2] > self.z_range[1]", "= Circle(radius=2, color=colors[0]).shift(LEFT * 5) max_radius = Circle(radius=10, color=colors[-1]).shift(LEFT *", "+ pos / 5 stream_lines = StreamLines( func, x_range=[-3, 3,", "vector field. color The color of the vector field. 
If", "max_color_scheme_value, colors, ) for point in start_points: points = [point]", ") -> Callable[[Mobject, float], Mobject]: \"\"\"Get an update function to", "{} self.vector_config = vector_config self.func = func x_range = np.arange(*self.x_range)", "rgbs[next_indices], inter_alphas) result = np.concatenate( (result, np.full([len(result), 1], opacity)), axis=1,", "list of colors to generate the gradient Returns ------- function", "noise_factor The amount by which the starting position of each", "if noise_factor is not None else self.y_range[2] / 2 )", "2, colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, # Determining Vector positions: x_range:", "arguments to be passed to the :class:`~.VGroup` constructor Examples --------", "lambda pos: np.sin(pos[1] / 2) * RIGHT + np.cos(pos[0] /", "[] self.remove_updater(self.flow_animation) self.flow_animation = None for line in self.stream_lines: create", "The function determining the displayed size of the vectors. The", "anchors get reduced in complexity, not in length. padding The", "ArrowVectorField( func, x_range=[-7, 7, 1], y_range=[-4, 4, 1], length_func=lambda x:", "np.clip(indices + 1, 0, len(rgbs) - 1) inter_alphas = scaled_alphas", "the stream lines. The stream lines appear in random order.", "= np.arange(*self.y_range) z_range = np.arange(*self.z_range) for x, y, z in", "dot.add_updater(vector_field.get_nudge_updater()) self.add(circle, dot) self.wait(6) \"\"\" def runge_kutta(self, p: Sequence[float], step_size:", "as it import random from math import ceil, floor from", "run time of each stream line creation. run_time The run", "numpy as np from colour import Color from PIL import", "---------- p The position of each point being moved along", "vectors. 
By default the color of each vector is determined", "Additional arguments to be passed to the :class:`~.Vector` constructor kwargs", "self.wait() \"\"\" if run_time is None: run_time = self.virtual_time if", "VectorField.scale_func(func, 0.5) self.play(vector_field.animate.become(ArrowVectorField(func))) self.wait() Returns ------- `Callable[[np.ndarray], np.ndarray]` The scaled", "initialized line by line. Otherwise it starts with all lines", "7, 1], length_func=length_func) self.play(vf.animate.become(vf2)) self.wait() .. manim:: Coloring :save_last_frame: class", "the :class:`~.Vector` constructor kwargs : Any Additional arguments to be", "start_submobject_movement( self, speed: float = 1, pointwise: bool = False,", "p: p + runge_kutta(self, p, step_size)) else: mob.shift(runge_kutta(self, mob.get_center(), step_size))", "\"\"\"A vector field. Vector fields are based on a function", "color_scheme is None: def color_scheme(p): return np.linalg.norm(p) self.color_scheme = color_scheme", "stroke_width=1, opacity=1, **kwargs ): self.x_range = x_range or [ floor(-config[\"frame_width\"]", "virtual_time=3, max_anchors_per_line=100, padding=3, # Determining stream line appearance: stroke_width=1, opacity=1,", "norm != 0: output *= self.length_func(norm) / norm vect =", "with fewer anchors ) self.play(stream_lines.create()) # uses virtual_time as run_time", "2), ceil(config[\"frame_height\"] / 2), ] self.ranges = [self.x_range, self.y_range] if", "* dt step = max(1, int(len(points) / self.max_anchors_per_line)) line.set_points_smoothly(points[::step]) if", "func The function defining the rate of change at every", "stream_lines = StreamLines( func, stroke_width=3, max_anchors_per_line=5, virtual_time=1, color=BLUE ) self.add(stream_lines)", "# Takes in actual norm, spits out displayed norm length_func:", "if not step: continue if config[\"renderer\"] == \"opengl\": line =", "7, 1], y_range=[-4, 4, 1], length_func=lambda x: x / 2", "SpawningAndFlowingArea 
:save_last_frame: class SpawningAndFlowingArea(Scene): def construct(self): func = lambda pos:", "def construct(self): func = lambda pos: np.sin(pos[1] / 2) *", "= flow_speed self.time_width = time_width def end_animation(self) -> AnimationGroup: \"\"\"End", "- pos) / 3 self.add(StreamLines(func)) .. manim:: SpawningAndFlowingArea :save_last_frame: class", "step_size: float) -> float: \"\"\"Returns the change in position of", "mob.get_center(), step_size)) return self def nudge_submobjects( self, dt: float =", "along the vector field. The actual distance is based on", "of z_min, z_max, delta_z three_dimensions Enables three_dimensions. Default set to", "StreamLines(VectorField): \"\"\"StreamLines represent the flow of a :class:`VectorField` using the", "of the stream lines is used as run time. Returns", "in range(len(self.ranges)): if len(self.ranges[i]) == 2: self.ranges[i] += [0.5] self.ranges[i][1]", "proportion of the stream line shown while being animated rate_func", ":class:`~.VGroup` constructor \"\"\" def __init__( self, func: Callable[[np.ndarray], np.ndarray], color:", "in the first color of the gradient. max_color_scheme_value The value", ":class:`~.Mobject` along the vector field. Parameters ---------- mob The mobject", "(pos[0] * UR + pos[1] * LEFT) - pos stream_lines", "int(config[\"pixel_height\"] / sampling_rate) pw = int(config[\"pixel_width\"] / sampling_rate) fw =", "stream line animation is running Examples -------- .. 
manim:: EndAnimation", "flow of a :class:`VectorField` using the trace of moving agents.", "= ArrowVectorField(func) self.add(vector_field) self.wait() func = VectorField.scale_func(func, 0.5) self.play(vector_field.animate.become(ArrowVectorField(func))) self.wait()", "= step * dt step = max(1, int(len(points) / self.max_anchors_per_line))", "lag_ratio: Optional[float] = None, run_time: Optional[Callable[[float], float]] = None, **kwargs", "Takes in actual norm, spits out displayed norm length_func: Callable[[float],", "the animation before fading it out.\") def hide_and_wait(mob, alpha): if", "using the trace of moving agents. Vector fields are always", "def stop_submobject_movement(self) -> \"VectorField\": \"\"\"Stops the continuous movement started using", "The maximum number of anchors per line. Lines with more", "flashing line_animation_class The animation class being used Examples -------- ..", "length is 1.5 times the run time of each stream", "magnitude of the vector field. substeps The amount of steps", "line = OpenGLVMobject() else: line = VMobject() line.duration = step", "= interpolate(rgbs[indices], rgbs[next_indices], inter_alphas) result = np.concatenate( (result, np.full([len(result), 1],", "- 1 c1 = self.rgbs[int(alpha)] c2 = self.rgbs[min(int(alpha + 1),", "run_time / 2 / len(self.submobjects) animations = [ Create(line, run_time=run_time,", "color_scheme function to be mapped to the last color in", "every position of the vector field. color The color of", "float = 0.3, rate_func: Callable[[float], float] = linear, line_animation_class: Type[ShowPassingFlash]", "+ 1, 0, len(rgbs) - 1) inter_alphas = scaled_alphas %", "`self.color_scheme` and finally generate a color from that value using", "opacity: float = 1.0, vector_config: Optional[dict] = None, **kwargs ):", "change in position of a point along a vector field.", "field. Higher values therefore result in longer stream lines. However,", "given :class:`~.Mobject`. 
If `True` the vector field takes effect on", "of change at every position of the vector field. color", "lines. Examples -------- .. manim:: BasicUsage :save_last_frame: class BasicUsage(Scene): def", "time gets simulated upon creation. max_anchors_per_line The maximum number of", "vector field. The actual distance is based on the magnitude", "line, run_time=creation_run_time, rate_func=creation_rate_func, ) if line.time <= 0: animations.append( Succession(", "LEFT * 5 colors = [RED, YELLOW, BLUE, DARK_GRAY] min_radius", "min_color_scheme_value, max_color_scheme_value, colors, **kwargs, ) self.noise_factor = ( noise_factor if", "line.duration = step * dt step = max(1, int(len(points) /", "in it.product(x_range, y_range, z_range): self.add(self.get_vector(x * RIGHT + y *", "k_3 + k_4) step_size = dt / substeps for _", "stroke with of the stream lines. opacity The opacity of", "rate_func: Callable[[float], float] = linear, line_animation_class: Type[ShowPassingFlash] = ShowPassingFlash, **kwargs", "Optional[float] = None, run_time: Optional[Callable[[float], float]] = None, **kwargs )", "pos_to_rgb self.pos_to_color = lambda pos: rgb_to_color(self.pos_to_rgb(pos)) else: self.single_color = True", "= lambda pos: rgb_to_color(self.pos_to_rgb(pos)) else: self.single_color = True self.color =", "all submobjects. Parameters ---------- dt A scalar to the amount", "their trace. Parameters ---------- func The function defining the rate", "floor(-config[\"frame_height\"] / 2), ceil(config[\"frame_height\"] / 2), ] self.ranges = [self.x_range,", "dot) self.wait(6) \"\"\" def runge_kutta(self, p: Sequence[float], step_size: float) ->", "color_scheme function to be mapped to the first color in", "the arrows. vector_config Additional arguments to be passed to the", "which to move the submobjects. See :meth:`get_nudge_updater` for details. 
pointwise", "color, color_scheme, min_color_scheme_value, max_color_scheme_value, colors, **kwargs, ) self.noise_factor = (", "alpha) self.pos_to_rgb = pos_to_rgb self.pos_to_color = lambda pos: rgb_to_color(self.pos_to_rgb(pos)) else:", "Vector(output, **self.vector_config) vect.shift(point) if self.single_color: vect.set_color(self.color) else: vect.set_color(self.pos_to_color(point)) return vect", "showing their trace. Parameters ---------- func The function defining the", "scales the speed of this flow. time_width The proportion of", "animation class being used Examples -------- .. manim:: ContinuousMotion class", "mobject. pointwise Whether to move the mobject along the vector", "Parameters ---------- mob The mobject to move along the vector", "By default this is used to cap the displayed size", "import ceil, floor from typing import Callable, Iterable, Optional, Sequence,", "self.set_opacity(self.opacity) def get_vector(self, point: np.ndarray): \"\"\"Creates a vector in the", "of the whole animation might be longer due to the", "vector_config Additional arguments to be passed to the :class:`~.Vector` constructor", "Optional, Sequence, Tuple, Type import numpy as np from colour", "pos: np.sin(pos[0]) * UR + np.cos(pos[1]) * LEFT + pos", "= max_run_time - line.time / self.flow_speed animations.append( Succession( UpdateFromAlphaFunc( line,", "mapped to the first color in `colors`. Lower values also", "cut. Returns ------- :class:`~.AnimationGroup` The animation fading out the running", "line, finish_updater_cycle, run_time=remaining_time, ), create, ), ) return AnimationGroup(*animations) #", ".. manim:: Nudging class Nudging(Scene): def construct(self): func = lambda", "construct(self): func = lambda pos: np.sin(pos[0]) * UR + np.cos(pos[1])", "scales the speed of such a mobject. 
pointwise Whether to", "[ floor(-config[\"frame_width\"] / 2), ceil(config[\"frame_width\"] / 2), ] self.y_range =", ") else: if np.any(self.z_range != np.array([0, 0.5, 0.5])): line.set_stroke( [self.pos_to_color(p)", "passed to the :class:`~.Vector` constructor \"\"\" output = np.array(self.func(point)) norm", "noise_factor is not None else self.y_range[2] / 2 ) self.n_repeats", "= 1.0, vector_config: Optional[dict] = None, **kwargs ): self.x_range =", "x_range or [ floor(-config[\"frame_width\"] / 2), ceil(config[\"frame_width\"] / 2), ]", "def get_colored_background_image(self, sampling_rate: int = 5) -> Image.Image: \"\"\"Generate an", "= Dot().move_to(circle) vector_field.nudge(circle, -2, 60, True) vector_field.nudge(dot, -2, 60) circle.add_updater(vector_field.get_nudge_updater(pointwise=True))", "vector field function at that position, map that vector to", "result in the last color of the gradient. colors The", "animation. Raises ------ ValueError if no stream line animation is", "shifted in a single step. Returns ------- float How much", "function. Parameters ---------- func The function defining a vector field.", "is displayed as a grid of vectors. By default the", "np.random.seed(0) start_points = np.array( [ (x - half_noise) * RIGHT", "method multiple times will result in removing the previous updater", "three_dimensions: bool = False, noise_factor: Optional[float] = None, n_repeats=1, #", "= False, ) -> Callable[[Mobject, float], Mobject]: \"\"\"Get an update", ") -> \"VectorField\": \"\"\"Start continuously moving all submobjects along the", "- self.padding or p[1] > self.y_range[1] + self.padding - self.y_range[2]", "1) inter_alphas = scaled_alphas % 1 inter_alphas = inter_alphas.repeat(3).reshape((len(indices), 3))", "stream lines without a noticeable cut. Returns ------- :class:`~.AnimationGroup` The", "Parameters ---------- func The function defining a vector field. 
shift_vector", "self.rgbs = np.array(list(map(color_to_rgb, colors))) def pos_to_rgb(pos: np.ndarray) -> Tuple[float, float,", "substeps The amount of steps the whole nudge is divided", "function. \"\"\" return lambda p: func(p * scalar) def nudge(", "Parameters ---------- p The position of each point being moved", "range(self.n_repeats) for x in np.arange(*self.x_range) for y in np.arange(*self.y_range) for", "actual norm, spits out displayed norm length_func: Callable[[float], float] =", "of each agent is altered along each axis. Defaults to", "Returns ------- :class:`~.AnimationGroup` The creation animation of the stream lines.", "dt): for line in mob.stream_lines: line.time += dt * flow_speed", "constructor kwargs : Any Additional arguments to be passed to", "a set of change vectors. Vector fields are always based", "colors, **kwargs, ) self.length_func = length_func self.opacity = opacity if", "= False, ) -> \"VectorField\": \"\"\"Nudge a :class:`~.Mobject` along the", "------- VectorField This vector field. \"\"\" for mob in self.submobjects:", "func, min_color_scheme_value=2, max_color_scheme_value=10, colors=colors ) self.add(vf, min_radius, max_radius) \"\"\" def", "max_color_scheme_value, color_value, ) alpha *= len(self.rgbs) - 1 c1 =", "of the vector field. x_range A sequence of x_min, x_max,", "along the vector field. 
When used with :meth:`~.Mobject.add_updater`, the mobject", ">= self.virtual_time: line.time -= self.virtual_time line.anim.interpolate(np.clip(line.time / line.anim.run_time, 0, 1))", "= 1 / config[\"frame_rate\"] animations = [] self.remove_updater(self.flow_animation) self.flow_animation =", "/ 2), ] self.y_range = y_range or [ floor(-config[\"frame_height\"] /", "int(config[\"pixel_width\"] / sampling_rate) fw = config[\"frame_width\"] fh = config[\"frame_height\"] points_array", "self.length_func(norm) / norm vect = Vector(output, **self.vector_config) vect.shift(point) if self.single_color:", "stream_lines = StreamLines( func, color=YELLOW, x_range=[-7, 7, 1], y_range=[-4, 4,", "None: raise ValueError(\"You have to start the animation before fading", "actual size of the vector is passed, the returned value", "vector is passed, the returned value will be used as", "else: if np.any(self.z_range != np.array([0, 0.5, 0.5])): line.set_stroke( [self.pos_to_color(p) for", "line_animation_class( line, run_time=run_time, rate_func=rate_func, time_width=time_width, **kwargs, ) line.anim.begin() line.time =", ") ph = int(config[\"pixel_height\"] / sampling_rate) pw = int(config[\"pixel_width\"] /", "color schemes can be used however. Parameters ---------- func The", "gradient. colors The colors defining the color gradient of the", "the magnitude of the vector field. substeps The amount of", "False, # Automatically True if z_range is set # Takes", "list = [BLUE_E, GREEN, YELLOW, RED] class VectorField(VGroup): \"\"\"A vector", "* func(last_point) if outside_box(new_point): break points.append(new_point) step = max_steps if", "x_array = x_array.reshape((1, len(x_array))) y_array = y_array.reshape((len(y_array), 1)) x_array =", "rate_func The rate function of each stream line flashing line_animation_class", "import Mobject from ..mobject.types.vectorized_mobject import VGroup, VMobject from ..utils.bezier import", "The opacity of the stream lines. Examples -------- .. 
manim::", ") for point in start_points: points = [point] for _", "RED] class VectorField(VGroup): \"\"\"A vector field. Vector fields are based", "float = 1, pointwise: bool = False, ) -> \"VectorField\":", "defining the :class:`~.Vector` at every position. The values of this", "1.5 times the run time of each stream line creation.", "at each starting point. dt The factor by which the", "* UP vector_field = ArrowVectorField(func) self.add(vector_field) self.wait() func = VectorField.scale_func(func,", "is not None else self.y_range[2] / 2 ) self.n_repeats =", "stroke_width half_noise = self.noise_factor / 2 np.random.seed(0) start_points = np.array(", "the vector field and showing their trace. Parameters ---------- func", "from .types.opengl_vectorized_mobject import OpenGLVMobject DEFAULT_SCALAR_FIELD_COLORS: list = [BLUE_E, GREEN, YELLOW,", "agents. Vector fields are always based on a function defining", "\"\"\"Creates a vector in the vector field. The created vector", "lbl in labels: lbl.add_background_rectangle(opacity=0.6, buff=0.05) self.add(stream_lines, spawning_area, flowing_area, *labels) \"\"\"", "a single step. Returns ------- float How much the point", "else: if config[\"renderer\"] == \"opengl\": # scaled for compatibility with", "== 1: self.remove(line.anim.mobject) line.anim.finish() max_run_time = self.virtual_time / self.flow_speed creation_rate_func", "np.ndarray]: \"\"\"Scale a vector field function. Parameters ---------- func The", "in a single step. Returns ------- float How much the", "start_points = np.array( [ (x - half_noise) * RIGHT +", "along the vector field, where its speed is determined by", "( noise_factor if noise_factor is not None else self.y_range[2] /", "an animation resulting in fully displayed stream lines without a", "of the stream lines. The stream lines appear in random", "longer due to the `lag_ratio`. 
If undefined, the virtual time", "= self.get_colored_background_image() if config[\"renderer\"] == \"opengl\": self.values_to_rgbas = self.get_vectorized_rgba_gradient_function( min_color_scheme_value,", "-> Callable[[np.ndarray], np.ndarray]: \"\"\"Scale a vector field function. Parameters ----------", "color_scheme, min_color_scheme_value, max_color_scheme_value, colors, **kwargs, ) self.length_func = length_func self.opacity", "= func x_range = np.arange(*self.x_range) y_range = np.arange(*self.y_range) z_range =", "p[0] < self.x_range[0] - self.padding or p[0] > self.x_range[1] +", "animations = [] self.remove_updater(self.flow_animation) self.flow_animation = None for line in", "vector_field = ArrowVectorField( func, x_range=[-7, 7, 1], y_range=[-4, 4, 1],", "be passed to the :class:`~.Vector` constructor \"\"\" output = np.array(self.func(point))", "passed to the :class:`~.VGroup` constructor \"\"\" def __init__( self, func:", "ease_out_sine, linear from ..utils.simple_functions import sigmoid from .types.opengl_vectorized_mobject import OpenGLVMobject", "to generate the gradients as numpy arrays representing rgba values", "2) * UR + np.cos(pos[1] / 2) * LEFT vf", "dt=0.05, virtual_time=3, max_anchors_per_line=100, padding=3, # Determining stream line appearance: stroke_width=1,", "Tuple[float, float, float, float]: vec = self.func(pos) color_value = np.clip(", "func x_range = np.arange(*self.x_range) y_range = np.arange(*self.y_range) z_range = np.arange(*self.z_range)", "being moved along the vector field. step_size A scalar that", "it out.\") def hide_and_wait(mob, alpha): if alpha == 0: mob.set_stroke(opacity=0)", "Raises ------ ValueError if no stream line animation is running", "step * dt step = max(1, int(len(points) / self.max_anchors_per_line)) line.set_points_smoothly(points[::step])", "line in mob.stream_lines: line.time += dt * flow_speed if line.time", "dt A scalar to the amount the mobject is moved", "in `colors`. 
Lower values also result in the first color", "np.ndarray], scalar: float, ) -> Callable[[np.ndarray], np.ndarray]: \"\"\"Scale a vector", "of vectors to reduce the clutter. opacity The opacity of", "manim:: BasicUsage :save_last_frame: class BasicUsage(Scene): def construct(self): func = lambda", "longer stream lines. However, this whole time gets simulated upon", "line.time *= -1 self.add(line.anim.mobject) def updater(mob, dt): for line in", "rate_func=creation_rate_func, ) if line.time <= 0: animations.append( Succession( UpdateFromAlphaFunc( line,", "function determining the displayed size of the vectors. The actual", "passed to the :class:`~.Vector` constructor kwargs : Any Additional arguments", "if norm != 0: output *= self.length_func(norm) / norm vect", "image. Lower values give more accurate results, but may take", "True if z_range is not None. noise_factor The amount by", "( max_run_time / (1 + self.time_width) * creation_staring_speed ) #", "* 255).astype(\"uint8\")) def get_vectorized_rgba_gradient_function( self, start: float, end: float, colors:", "def construct(self): func = lambda pos: np.sin(pos[0] / 2) *", "self.func(pos) color_value = np.clip( self.color_scheme(vec), min_color_scheme_value, max_color_scheme_value, ) alpha =", "vector field. shift_vector The scalar to be applied to the", "change at every position of the vector field. 
color The", "self, speed: float = 1, pointwise: bool = False, )", "+ 1), len(self.rgbs) - 1)] alpha %= 1 return interpolate(c1,", "---------- sampling_rate The stepsize at which pixels get included in", "2) * LEFT stream_lines = StreamLines( func, stroke_width=3, max_anchors_per_line=5, virtual_time=1,", "are based on a function defining a vector at every", "float = 2, colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, # Determining Vector", "Rectangle(width=6, height=4) flowing_area = Rectangle(width=8, height=6) labels = [Tex(\"Spawning Area\"),", "= int(config[\"pixel_height\"] / sampling_rate) pw = int(config[\"pixel_width\"] / sampling_rate) fw", "color_value, ) alpha *= len(self.rgbs) - 1 c1 = self.rgbs[int(alpha)]", "determine how much a point is shifted in a single", "field. \"\"\" self.stop_submobject_movement() self.submob_movement_updater = lambda mob, dt: mob.nudge_submobjects( dt", "animated rate_func The rate function of each stream line flashing", "= np.arange(*self.z_range) for x, y, z in it.product(x_range, y_range, z_range):", "/ dt) + 1 if not self.single_color: self.background_img = self.get_colored_background_image()", "TODO maybe other default for direction? self.rgbs = np.array(list(map(color_to_rgb, colors)))", "amount of steps the whole nudge is divided into. Higher", "for y in np.arange(*self.y_range) for z in np.arange(*self.z_range) ], )", "all submobjects along the vector field. Calling this method multiple", "value of the color_scheme function to be mapped to the", "= inverse_interpolate( min_color_scheme_value, max_color_scheme_value, color_value, ) alpha *= len(self.rgbs) -", "the vector field. Examples -------- .. 
manim:: ScaleVectorFieldFunction class ScaleVectorFieldFunction(Scene):", "self.add(vector_field) circle = Circle(radius=2).shift(LEFT) self.add(circle.copy().set_color(GRAY)) dot = Dot().move_to(circle) vector_field.nudge(circle, -2,", ") self.add(vector_field) circle = Circle(radius=2).shift(LEFT) self.add(circle.copy().set_color(GRAY)) dot = Dot().move_to(circle) vector_field.nudge(circle,", "for details. pointwise Whether to move the mobject along the", "details. Returns ------- Callable[[Mobject, float], Mobject] The update function. \"\"\"", "GREEN, RED, YELLOW, color_to_rgb, rgb_to_color from ..utils.deprecation import deprecated_params from", "at :func:`~.inverse_interpolate` colors list of colors to generate the gradient", ") -> Callable[[np.ndarray], np.ndarray]: \"\"\"Shift a vector field function. Parameters", "if config[\"renderer\"] == \"opengl\": self.values_to_rgbas = self.get_vectorized_rgba_gradient_function( min_color_scheme_value, max_color_scheme_value, colors,", "= VectorField.scale_func(func, 0.5) self.play(vector_field.animate.become(ArrowVectorField(func))) self.wait() Returns ------- `Callable[[np.ndarray], np.ndarray]` The", "result in the first color of the gradient. 
max_color_scheme_value The", "# Automatically True if z_range is set # Takes in", "* UR + np.cos(pos[1]) * LEFT + pos / 5", "color=BLUE ) self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5, time_width=0.5) self.wait(1) self.play(stream_lines.end_animation()) \"\"\" if", "self.wait(1) self.play(stream_lines.end_animation()) \"\"\" if self.flow_animation is None: raise ValueError(\"You have", "* self.flow_speed line.anim.interpolate(min(line.time / line.anim.run_time, 1)) if alpha == 1:", "shift_vector: np.ndarray, ) -> Callable[[np.ndarray], np.ndarray]: \"\"\"Shift a vector field", "lambda pos: (pos[0] * UR + pos[1] * LEFT) -", "point in start_points: points = [point] for _ in range(max_steps):", "size of the vector is passed, the returned value will", ") def outside_box(p): return ( p[0] < self.x_range[0] - self.padding", "self.x_range[2] or p[1] < self.y_range[0] - self.padding or p[1] >", "vector field. Parameters ---------- p The position of each point", "/ 2), ceil(config[\"frame_height\"] / 2), ] self.ranges = [self.x_range, self.y_range]", "it.product(x_range, y_range, z_range): self.add(self.get_vector(x * RIGHT + y * UP", "so that the creation animation starts at the same speed", "warm_up=True, flow_speed: float = 1, time_width: float = 0.3, rate_func:", "import Image from .. import config from ..animation.composition import AnimationGroup,", "mob.stream_lines: line.time += dt * flow_speed if line.time >= self.virtual_time:", "an image if the vector field uses a single color.\",", "turns True if z_range is not None. length_func The function", "first color of the gradient. 
max_color_scheme_value The value of the", "last_point = points[-1] new_point = last_point + dt * func(last_point)", "/ 6.0 * (k_1 + 2.0 * k_2 + 2.0", "line.points], ) line.set_rgba_array_direct( self.values_to_rgbas(norms, opacity), name=\"stroke_rgba\", ) else: if np.any(self.z_range", "pointwise: bool = False, ) -> \"VectorField\": \"\"\"Nudge a :class:`~.Mobject`", "Image.fromarray((rgbs * 255).astype(\"uint8\")) def get_vectorized_rgba_gradient_function( self, start: float, end: float,", "value scales the speed of this flow. time_width The proportion", "The stroke with of the stream lines. opacity The opacity", "self.func(p + step_size * k_3) return step_size / 6.0 *", "np.cos(pos[1]) * LEFT + pos / 5 stream_lines = StreamLines(", "p + runge_kutta(self, p, step_size)) else: mob.shift(runge_kutta(self, mob.get_center(), step_size)) return", "* RIGHT + y * UP + z * OUT))", "1, pointwise: bool = False, ) -> Callable[[Mobject, float], Mobject]:", "If undefined, the virtual time of the stream lines is", "the mobject along the vector field. If `False` the vector", "axis=1) # TODO why not y_array = y_array.repeat(...)? points_array[:, :,", "always based on a function defining the vector at every", "vector at every position. The values of this functions is", "line_animation_class The animation class being used Examples -------- .. manim::", "runtime of the whole animation might be longer due to", "dt / substeps for _ in range(substeps): if pointwise: mob.apply_function(lambda", "alpha): line.time += dt * self.flow_speed line.anim.interpolate(min(line.time / line.anim.run_time, 1))", "in generating an image if the vector field uses a", "= Circle(radius=2).shift(LEFT) self.add(circle.copy().set_color(GRAY)) dot = Dot().move_to(circle) vector_field.nudge(circle, -2, 60, True)", "the vector field. See :meth:`nudge` for details. 
Returns ------- Callable[[Mobject,", "/ sampling_rate) fw = config[\"frame_width\"] fh = config[\"frame_height\"] points_array =", "= False, ) -> \"VectorField\": \"\"\"Start continuously moving all submobjects", "Returns ------- VectorField This vector field. \"\"\" for mob in", "defining the color gradient of the vector field. x_range A", "new_point = last_point + dt * func(last_point) if outside_box(new_point): break", "- self.z_range[2] ) max_steps = ceil(virtual_time / dt) + 1", "self.add(line) self.stream_lines = [*self.submobjects] def create( self, lag_ratio: Optional[float] =", "func, x_range=[-7, 7, 1], y_range=[-4, 4, 1], length_func=lambda x: x", "StreamLineCreation class StreamLineCreation(Scene): def construct(self): func = lambda pos: (pos[0]", "1: mob.set_stroke(opacity=1) def finish_updater_cycle(line, alpha): line.time += dt * self.flow_speed", "manim:: SpawningAndFlowingArea :save_last_frame: class SpawningAndFlowingArea(Scene): def construct(self): func = lambda", "= None for line in self.stream_lines: create = Create( line,", "into. Higher values give more accurate approximations. pointwise Whether to", "along the vector field. If `False` the vector field takes", "Parameters ---------- sampling_rate The stepsize at which pixels get included", "Additional arguments to be passed to the :class:`~.VGroup` constructor Examples", "arrays representing rgba values \"\"\" rgbs = np.array([color_to_rgb(c) for c", "func = lambda pos: pos - LEFT * 5 colors", "/ 2, -fh / 2, ph) x_array = x_array.reshape((1, len(x_array)))", "construct(self): func = lambda pos: np.sin(pos[0] / 2) * UR", "> self.y_range[1] + self.padding - self.y_range[2] or p[2] < self.z_range[0]", "create, ), ) self.remove(line.anim.mobject) line.anim.finish() else: remaining_time = max_run_time -", "pointwise: mob.apply_function(lambda p: p + runge_kutta(self, p, step_size)) else: mob.shift(runge_kutta(self,", "per line. 
Lines with more anchors get reduced in complexity,", "to the :class:`~.Vector` constructor kwargs : Any Additional arguments to", "half_noise = self.noise_factor / 2 np.random.seed(0) start_points = np.array( [", "multiple times will result in removing the previous updater created", "bool = False, ) -> Callable[[Mobject, float], Mobject]: \"\"\"Get an", "func: Callable[[np.ndarray], np.ndarray], shift_vector: np.ndarray, ) -> Callable[[np.ndarray], np.ndarray]: \"\"\"Shift", "= y_array.reshape((len(y_array), 1)) x_array = x_array.repeat(ph, axis=0) y_array.repeat(pw, axis=1) #", "func = VectorField.scale_func(func, 0.5) self.play(vector_field.animate.become(ArrowVectorField(func))) self.wait() Returns ------- `Callable[[np.ndarray], np.ndarray]`", "the :class:`~.Vector` at every position. The values of this functions", "more anchors get reduced in complexity, not in length. padding", "self.noise_factor = ( noise_factor if noise_factor is not None else", "length_func=length_func) self.play(vf.animate.become(vf2)) self.wait() .. manim:: Coloring :save_last_frame: class Coloring(Scene): def", "lambda pos: pos - LEFT * 5 colors = [RED,", "self.x_range[1] + self.padding - self.x_range[2] or p[1] < self.y_range[0] -", "def scale_func( func: Callable[[np.ndarray], np.ndarray], scalar: float, ) -> Callable[[np.ndarray],", "field. The actual distance is based on the magnitude of", "value used for inverse interpolation at :func:`~.inverse_interpolate` end end value", "import Vector from ..mobject.mobject import Mobject from ..mobject.types.vectorized_mobject import VGroup,", "func, color, color_scheme, min_color_scheme_value, max_color_scheme_value, colors, **kwargs, ) self.length_func =", "= vector_config self.func = func x_range = np.arange(*self.x_range) y_range =", "position of each agent is altered along each axis. Defaults", "pos) / 3 self.add(ArrowVectorField(func)) .. manim:: SizingAndSpacing class SizingAndSpacing(Scene): def", "of the stream lines. 
opacity The opacity of the stream", "# better performance with fewer anchors ) self.play(stream_lines.create()) # uses", "bool = False, noise_factor: Optional[float] = None, n_repeats=1, # Determining", "* flow_speed if line.time >= self.virtual_time: line.time -= self.virtual_time line.anim.interpolate(np.clip(line.time", "of the vector field along its path. The speed value", "field takes effect on the points of the individual points", "is set # Takes in actual norm, spits out displayed", "1: self.remove(line.anim.mobject) line.anim.finish() max_run_time = self.virtual_time / self.flow_speed creation_rate_func =", "defining a vector field. shift_vector The shift to be applied", "each vector is determined by it's magnitude. Other color schemes", "the `lag_ratio`. If undefined, the virtual time of the stream", "* k_3) return step_size / 6.0 * (k_1 + 2.0", "to the `lag_ratio`. If undefined, the virtual time of the", "/ 2), ] self.ranges = [self.x_range, self.y_range] if three_dimensions or", "pointwise) return self def get_nudge_updater( self, speed: float = 1,", "Callable[[np.ndarray], np.ndarray]: \"\"\"Shift a vector field function. Parameters ---------- func", "not include any visible elements but provides methods to move", "= virtual_time self.max_anchors_per_line = max_anchors_per_line self.padding = padding self.stroke_width =", "DEFAULT_SCALAR_FIELD_COLORS, # Determining stream line starting positions: x_range: Sequence[float] =", "Create( line, run_time=creation_run_time, rate_func=creation_rate_func, ) if line.time <= 0: animations.append(", "the returned value will be used as display size for", "uses a single color.\", ) ph = int(config[\"pixel_height\"] / sampling_rate)", "by line. Otherwise it starts with all lines shown. 
flow_speed", "..animation.creation import Create from ..animation.indication import ShowPassingFlash from ..animation.update import", "math import ceil, floor from typing import Callable, Iterable, Optional,", "/ (1 + self.time_width) * creation_staring_speed ) # creation_run_time is", "/ 2), ceil(config[\"frame_width\"] / 2), ] self.y_range = y_range or", "fh = config[\"frame_height\"] points_array = np.zeros((ph, pw, 3)) x_array =", "field. Parameters ---------- func The function defining the rate of", "max_anchors_per_line self.padding = padding self.stroke_width = stroke_width half_noise = self.noise_factor", "z_range is not None. length_func The function determining the displayed", "StreamLines that is able to respond to changes in the", "to the :class:`~.Vector` constructor \"\"\" output = np.array(self.func(point)) norm =", "..animation.indication import ShowPassingFlash from ..animation.update import UpdateFromAlphaFunc from ..constants import", "---------- func The function defining a vector field. 
shift_vector The", "= lambda pos: ((pos[0] * UR + pos[1] * LEFT)", "vector field takes effect on the points of the individual", "is None: run_time = self.virtual_time if lag_ratio is None: lag_ratio", "np.cos(pos[1] / 2) * LEFT stream_lines = StreamLines(func, stroke_width=3, max_anchors_per_line=30)", "stream_lines.start_animation(warm_up=False, flow_speed=1.5) self.wait(stream_lines.virtual_time / stream_lines.flow_speed) \"\"\" for line in self.stream_lines:", "min_color_scheme_value, max_color_scheme_value, colors, **kwargs, ) self.length_func = length_func self.opacity =", "colors The colors defining the color gradient of the vector", "1)) if alpha == 1: self.remove(line.anim.mobject) line.anim.finish() max_run_time = self.virtual_time", "..utils.rate_functions import ease_out_sine, linear from ..utils.simple_functions import sigmoid from .types.opengl_vectorized_mobject", "is None: vector_config = {} self.vector_config = vector_config self.func =", "self.values_to_rgbas(norms, opacity), name=\"stroke_rgba\", ) else: if np.any(self.z_range != np.array([0, 0.5,", "self.max_anchors_per_line)) line.set_points_smoothly(points[::step]) if self.single_color: line.set_stroke(self.color) else: if config[\"renderer\"] == \"opengl\":", "mob.set_stroke(opacity=0) elif alpha == 1: mob.set_stroke(opacity=1) def finish_updater_cycle(line, alpha): line.time", "-------- .. manim:: BasicUsage :save_last_frame: class BasicUsage(Scene): def construct(self): func", "the vectors. 
The actual size of the vector is passed,", "k_3 = self.func(p + step_size * (k_2 * 0.5)) k_4", "self.ranges super().__init__( func, color, color_scheme, min_color_scheme_value, max_color_scheme_value, colors, **kwargs, )", "color is None: self.single_color = False if color_scheme is None:", "it import random from math import ceil, floor from typing", "Sequence[float] = None, three_dimensions: bool = False, noise_factor: Optional[float] =", "drawn dt=0.05, virtual_time=3, max_anchors_per_line=100, padding=3, # Determining stream line appearance:", "/ substeps for _ in range(substeps): if pointwise: mob.apply_function(lambda p:", "whole time gets simulated upon creation. max_anchors_per_line The maximum number", "pointwise: bool = False, ) -> \"VectorField\": \"\"\"Start continuously moving", "Variant of StreamLines that is able to respond to changes", "in length. padding The distance agents can move out of", "of the vectors. The actual size of the vector is", "a gradient of rgbas as a numpy array Parameters ----------", "lambda x: x / 3 vf2 = ArrowVectorField(func, x_range=[-7, 7,", "self.virtual_time: line.time -= self.virtual_time line.anim.interpolate(np.clip(line.time / line.anim.run_time, 0, 1)) self.add_updater(updater)", "value. This value gives the position in the color gradient", "the rate of change at every position of the `VectorField`.", "field. 
\"\"\" for mob in self.submobjects: self.nudge(mob, dt, substeps, pointwise)", "= scaled_alphas % 1 inter_alphas = inter_alphas.repeat(3).reshape((len(indices), 3)) result =", "- shift_vector) @staticmethod def scale_func( func: Callable[[np.ndarray], np.ndarray], scalar: float,", "in range(self.n_repeats) for x in np.arange(*self.x_range) for y in np.arange(*self.y_range)", "ScaleVectorFieldFunction(Scene): def construct(self): func = lambda pos: np.sin(pos[1]) * RIGHT", "self.submob_movement_updater = None return self def get_colored_background_image(self, sampling_rate: int =", ") if line.time <= 0: animations.append( Succession( UpdateFromAlphaFunc( line, hide_and_wait,", "point is shifted. \"\"\" k_1 = self.func(p) k_2 = self.func(p", "= lambda pos: np.sin(pos[0]) * UR + np.cos(pos[1]) * LEFT", "through a series of steps: Calculate the vector field function", "A sequence of z_min, z_max, delta_z three_dimensions Enables three_dimensions. Default", ":class:`VectorField` using the trace of moving agents. Vector fields are", "in range(max_steps): last_point = points[-1] new_point = last_point + dt", "= random.random() * self.virtual_time if warm_up: line.time *= -1 self.add(line.anim.mobject)", "-2, 60, True) vector_field.nudge(dot, -2, 60) circle.add_updater(vector_field.get_nudge_updater(pointwise=True)) dot.add_updater(vector_field.get_nudge_updater()) self.add(circle, dot)", "Tuple, Type import numpy as np from colour import Color", ".. manim:: Coloring :save_last_frame: class Coloring(Scene): def construct(self): func =", "line.duration / flow_speed line.anim = line_animation_class( line, run_time=run_time, rate_func=rate_func, time_width=time_width,", "class being used Examples -------- .. 
manim:: ContinuousMotion class ContinuousMotion(Scene):", "np.linspace(fh / 2, -fh / 2, ph) x_array = x_array.reshape((1,", "Sequence[float] = None, z_range: Sequence[float] = None, three_dimensions: bool =", "+ pos[1] * LEFT) - pos) / 3 self.add(StreamLines(func)) ..", "of StreamLines that is able to respond to changes in", "submobjects. Parameters ---------- dt A scalar to the amount the", "Parameters ---------- warm_up : bool, optional If `True` the animation", "= x_array.repeat(ph, axis=0) y_array.repeat(pw, axis=1) # TODO why not y_array", "or p[1] < self.y_range[0] - self.padding or p[1] > self.y_range[1]", "finally generate a color from that value using the color", "5) -> Image.Image: \"\"\"Generate an image that displays the vector", "= int(config[\"pixel_width\"] / sampling_rate) fw = config[\"frame_width\"] fh = config[\"frame_height\"]", "line.anim.interpolate(np.clip(line.time / line.anim.run_time, 0, 1)) self.add_updater(updater) self.flow_animation = updater self.flow_speed", "vector_config self.func = func x_range = np.arange(*self.x_range) y_range = np.arange(*self.y_range)", "output *= self.length_func(norm) / norm vect = Vector(output, **self.vector_config) vect.shift(point)", "a point is shifted in a single step. Returns -------", "mob.shift(runge_kutta(self, mob.get_center(), step_size)) return self def nudge_submobjects( self, dt: float", "* 5 colors = [RED, YELLOW, BLUE, DARK_GRAY] min_radius =", "individual points of the :class:`~.Mobject`, potentially distorting it. 
Returns -------", "finish_updater_cycle, run_time=remaining_time, ), create, ), ) return AnimationGroup(*animations) # TODO:", "numpy arrays representing rgba values \"\"\" rgbs = np.array([color_to_rgb(c) for", "= padding self.stroke_width = stroke_width half_noise = self.noise_factor / 2", "Optional[Callable[[np.ndarray], float]] = None, min_color_scheme_value: float = 0, max_color_scheme_value: float", "/ self.flow_speed animations.append( Succession( UpdateFromAlphaFunc( line, finish_updater_cycle, run_time=remaining_time, ), create,", "time_width: float = 0.3, rate_func: Callable[[float], float] = linear, line_animation_class:", "Coloring(Scene): def construct(self): func = lambda pos: pos - LEFT", "The scaled vector field function. \"\"\" return lambda p: func(p", "is displayed by moving many agents along the vector field", "self.ranges[i][2] self.x_range, self.y_range, self.z_range = self.ranges super().__init__( func, color, color_scheme,", "creation_staring_speed = creation_rate_func(0.001) * 1000 creation_run_time = ( max_run_time /", "color of the gradient. max_color_scheme_value The value of the color_scheme", "-> AnimationGroup: \"\"\"The creation animation of the stream lines. The", "Image.Imgae The vector field image. \"\"\" if self.single_color: raise ValueError(", "the vector field takes effect on the points of the", "name=\"stroke_rgba\", ) else: if np.any(self.z_range != np.array([0, 0.5, 0.5])): line.set_stroke(", "of a point along a vector field. Parameters ---------- p", "range(len(self.ranges)): if len(self.ranges[i]) == 2: self.ranges[i] += [0.5] self.ranges[i][1] +=", ":save_last_frame: class BasicUsage(Scene): def construct(self): func = lambda pos: ((pos[0]", "def nudge_submobjects( self, dt: float = 1, substeps: int =", "= None @staticmethod def shift_func( func: Callable[[np.ndarray], np.ndarray], shift_vector: np.ndarray,", "the gradient. 
colors The colors defining the color gradient of", "self.flow_animation is None: raise ValueError(\"You have to start the animation", "flow_speed: float = 1, time_width: float = 0.3, rate_func: Callable[[float],", "the magnitude of the vector field along its path. The", "field along its path. The speed value scales the speed", "np.array(self.func(point)) norm = np.linalg.norm(output) if norm != 0: output *=", "to the magnitude of the vector field along its path.", "- 1)] alpha %= 1 return interpolate(c1, c2, alpha) self.pos_to_rgb", "length_func: Callable[[float], float] = lambda norm: 0.45 * sigmoid(norm), opacity:", "alpha == 1: mob.set_stroke(opacity=1) def finish_updater_cycle(line, alpha): line.time += dt", "-> Tuple[float, float, float, float]: vec = self.func(pos) color_value =", "the vector field uses a single color.\", ) ph =", "the last color in `colors`. Higher values also result in", "np.cos(pos[0]) * UP vector_field = ArrowVectorField(func) self.add(vector_field) self.wait() func =", "1], opacity)), axis=1, ) return result return func class ArrowVectorField(VectorField):", "func, x_range=[-3, 3, 0.2], y_range=[-2, 2, 0.2], padding=1 ) spawning_area", "value scales the speed of such a mobject. pointwise Whether", "max_anchors_per_line The maximum number of anchors per line. 
Lines with", "0.5)) k_4 = self.func(p + step_size * k_3) return step_size", "shift_vector The shift to be applied to the vector field.", "Generates a gradient of rgbas as a numpy array Parameters", "= updater self.flow_speed = flow_speed self.time_width = time_width def end_animation(self)", "calculated so that the creation animation starts at the same", "int(len(points) / self.max_anchors_per_line)) line.set_points_smoothly(points[::step]) if self.single_color: line.set_stroke(self.color) else: if config[\"renderer\"]", "from ..animation.indication import ShowPassingFlash from ..animation.update import UpdateFromAlphaFunc from ..constants", "= lambda pos: np.sin(pos[0] / 2) * UR + np.cos(pos[1]", "if the vector field uses a single color.\", ) ph", "the stream lines is used as run time. Returns -------", "\"StreamLines\", ] import itertools as it import random from math", "field uses a single color.\", ) ph = int(config[\"pixel_height\"] /", "self.add(vf) self.wait() length_func = lambda x: x / 3 vf2", "of each stream line flashing line_animation_class The animation class being", "= False if color_scheme is None: def color_scheme(p): return np.linalg.norm(p)", "of agents generated at each starting point. dt The factor", "func The function defining a vector field. shift_vector The scalar", "RIGHT, UP from ..mobject.geometry import Vector from ..mobject.mobject import Mobject", "used to determine how much a point is shifted in", "step_size * (k_1 * 0.5)) k_3 = self.func(p + step_size", "at :func:`~.inverse_interpolate` end end value used for inverse interpolation at", "point in generating an image if the vector field uses", "move the mobject along the vector field. See :meth:`nudge` for", "Examples -------- .. manim:: StreamLineCreation class StreamLineCreation(Scene): def construct(self): func", "at each position is calculated by passing the positing through", "a vector to a single value. 
This value gives the", "speed, pointwise=pointwise) def start_submobject_movement( self, speed: float = 1, pointwise:", "of the color_scheme function to be mapped to the last", "of the stream line shown while being animated rate_func The", "for mob in self.submobjects: self.nudge(mob, dt, substeps, pointwise) return self", "inverse_interpolate(start, end, np.array(values)) alphas = np.clip(alphas, 0, 1) scaled_alphas =", "**kwargs, ) line.anim.begin() line.time = random.random() * self.virtual_time if warm_up:", "def start_animation( self, warm_up=True, flow_speed: float = 1, time_width: float", "self.add(vf, min_radius, max_radius) \"\"\" def __init__( self, func: Callable[[np.ndarray], np.ndarray],", "VectorField This vector field. \"\"\" for mob in self.submobjects: self.nudge(mob,", "np.array( [ (x - half_noise) * RIGHT + (y -", "y_range=[-4, 4, 1], stroke_width=3, virtual_time=1, # use shorter lines max_anchors_per_line=5,", "alpha == 1: self.remove(line.anim.mobject) line.anim.finish() max_run_time = self.virtual_time / self.flow_speed", "* UR + pos[1] * LEFT) - pos) / 3", "The colors defining the color gradient of the vector field.", "a :class:`VectorField` using the trace of moving agents. Vector fields", "all lines shown. flow_speed At `flow_speed=1` the distance the flow", "import numpy as np from colour import Color from PIL", "* RIGHT + np.cos(pos[0]) * UP vector_field = ArrowVectorField(func) self.add(vector_field)", "This vector field. Examples -------- .. manim:: Nudging class Nudging(Scene):", "1 if not self.single_color: self.background_img = self.get_colored_background_image() if config[\"renderer\"] ==", "vector_config: Optional[dict] = None, **kwargs ): self.x_range = x_range or", ":class:`~.Vector` at every position. 
The values of this functions is", "YELLOW, color_to_rgb, rgb_to_color from ..utils.deprecation import deprecated_params from ..utils.rate_functions import", "Parameters ---------- dt A scalar to the amount the mobject", "[ floor(-config[\"frame_height\"] / 2), ceil(config[\"frame_height\"] / 2), ] self.ranges =", "== \"opengl\": line = OpenGLVMobject() else: line = VMobject() line.duration", "vf = ArrowVectorField( func, min_color_scheme_value=2, max_color_scheme_value=10, colors=colors ) self.add(vf, min_radius,", "out.\") def hide_and_wait(mob, alpha): if alpha == 0: mob.set_stroke(opacity=0) elif", "out of the generation area before being terminated. stroke_width The", "class ContinuousMotion(Scene): def construct(self): func = lambda pos: np.sin(pos[0] /", "self.stop_submobject_movement() self.submob_movement_updater = lambda mob, dt: mob.nudge_submobjects( dt * speed,", "line flashing line_animation_class The animation class being used Examples --------", "the individual points of the :class:`~.Mobject`, potentially distorting it. Returns", "to a single value using `self.color_scheme` and finally generate a", "line, hide_and_wait, run_time=-line.time / self.flow_speed, ), create, ), ) self.remove(line.anim.mobject)", "shift_vector) @staticmethod def scale_func( func: Callable[[np.ndarray], np.ndarray], scalar: float, )", "random.random() * self.virtual_time if warm_up: line.time *= -1 self.add(line.anim.mobject) def", "1) indices = scaled_alphas.astype(int) next_indices = np.clip(indices + 1, 0,", "with more anchors get reduced in complexity, not in length.", "of the given :class:`~.Mobject`. 
If `True` the vector field takes", "---------- start start value used for inverse interpolation at :func:`~.inverse_interpolate`", "self.virtual_time line.anim.interpolate(np.clip(line.time / line.anim.run_time, 0, 1)) self.add_updater(updater) self.flow_animation = updater", "1, 0, len(rgbs) - 1) inter_alphas = scaled_alphas % 1", "compatibility with cairo line.set_stroke(width=self.stroke_width / 4.0) norms = np.array( [np.linalg.norm(self.func(point))", "np.array(list(map(color_to_rgb, colors))) def pos_to_rgb(pos: np.ndarray) -> Tuple[float, float, float, float]:", "/ 2, fw / 2, pw) y_array = np.linspace(fh /", "smoothly. Returns an animation resulting in fully displayed stream lines", "appearance: stroke_width=1, opacity=1, **kwargs ): self.x_range = x_range or [", "scalar to be applied to the vector field. Examples --------", "manim:: ContinuousMotion class ContinuousMotion(Scene): def construct(self): func = lambda pos:", "= np.array( [ (x - half_noise) * RIGHT + (y", "np.ndarray, ) -> Callable[[np.ndarray], np.ndarray]: \"\"\"Shift a vector field function.", "started using :meth:`start_submobject_movement`. Returns ------- VectorField This vector field. \"\"\"", "z_range or self.y_range.copy() self.ranges += [self.z_range] else: self.ranges += [[0,", "float], Mobject]: \"\"\"Get an update function to move a :class:`~.Mobject`", "if alpha == 1: self.remove(line.anim.mobject) line.anim.finish() max_run_time = self.virtual_time /", "Dot().move_to(circle) vector_field.nudge(circle, -2, 60, True) vector_field.nudge(dot, -2, 60) circle.add_updater(vector_field.get_nudge_updater(pointwise=True)) dot.add_updater(vector_field.get_nudge_updater())", "also result in the last color of the gradient. colors", "self def nudge_submobjects( self, dt: float = 1, substeps: int", "at which pixels get included in the image. 
Lower values", "animation starts at the same speed # as the regular", "ArrowVectorField(func) self.add(vector_field) self.wait() func = VectorField.scale_func(func, 0.5) self.play(vector_field.animate.become(ArrowVectorField(func))) self.wait() Returns", "result return func class ArrowVectorField(VectorField): \"\"\"A :class:`VectorField` represented by a", "float) -> float: \"\"\"Returns the change in position of a", "== 2: self.ranges[i] += [0.5] self.ranges[i][1] += self.ranges[i][2] self.x_range, self.y_range,", "to move in the vector field. Higher values therefore result", "= np.clip(indices + 1, 0, len(rgbs) - 1) inter_alphas =", "- LEFT * 5 colors = [RED, YELLOW, BLUE, DARK_GRAY]", "if z_range is not None. length_func The function determining the", "stream line creation. The runtime of the whole animation might", "2.5)] for lbl in labels: lbl.add_background_rectangle(opacity=0.6, buff=0.05) self.add(stream_lines, spawning_area, flowing_area,", "`Callable[[np.ndarray], np.ndarray]` The shifted vector field function. \"\"\" return lambda", "undefined, the virtual time of the stream lines is used", "\"\"\"A :class:`VectorField` represented by a set of change vectors. Vector", "line.get_anchors()], ) else: line.color_using_background_image(self.background_img) line.set_stroke(width=self.stroke_width, opacity=opacity) self.add(line) self.stream_lines = [*self.submobjects]", "func, color=YELLOW, x_range=[-7, 7, 1], y_range=[-4, 4, 1], stroke_width=3, virtual_time=1,", "= ArrowVectorField(func, x_range=[-7, 7, 1], length_func=length_func) self.play(vf.animate.become(vf2)) self.wait() .. manim::", "+= self.ranges[i][2] self.x_range, self.y_range, self.z_range = self.ranges super().__init__( func, color,", "to the last color in `colors`. Higher values also result", "import config from ..animation.composition import AnimationGroup, Succession from ..animation.creation import", "+ step_size * k_3) return step_size / 6.0 * (k_1", ".. 
import config from ..animation.composition import AnimationGroup, Succession from ..animation.creation", "starting position of each agent is altered along each axis.", "= color_scheme # TODO maybe other default for direction? self.rgbs", "-> Callable[[Mobject, float], Mobject]: \"\"\"Get an update function to move", "255).astype(\"uint8\")) def get_vectorized_rgba_gradient_function( self, start: float, end: float, colors: Iterable,", ":class:`~.AnimationGroup` The animation fading out the running stream animation. Raises", "DEFAULT_SCALAR_FIELD_COLORS, **kwargs ): super().__init__(**kwargs) self.func = func if color is", "5 stream_lines = StreamLines( func, x_range=[-3, 3, 0.2], y_range=[-2, 2,", "UP from ..mobject.geometry import Vector from ..mobject.mobject import Mobject from", "number of anchors per line. Lines with more anchors get", "returned value will be used as display size for the", "RIGHT + (y - half_noise) * UP + (z -", "Vector fields are based on a function defining a vector", "= alphas * (len(rgbs) - 1) indices = scaled_alphas.astype(int) next_indices", "dt * func(last_point) if outside_box(new_point): break points.append(new_point) step = max_steps", "floor(-config[\"frame_width\"] / 2), ceil(config[\"frame_width\"] / 2), ] self.y_range = y_range", "the continuous movement started using :meth:`start_submobject_movement`. Returns ------- VectorField This", "self.flow_speed line.anim.interpolate(min(line.time / line.anim.run_time, 1)) if alpha == 1: self.remove(line.anim.mobject)", "): super().__init__(**kwargs) self.func = func if color is None: self.single_color", "to move the mobject along the vector field. 
See :meth:`nudge`", "scaled_alphas = alphas * (len(rgbs) - 1) indices = scaled_alphas.astype(int)", "vect = Vector(output, **self.vector_config) vect.shift(point) if self.single_color: vect.set_color(self.color) else: vect.set_color(self.pos_to_color(point))", "rgb_to_color(self.pos_to_rgb(pos)) else: self.single_color = True self.color = color self.submob_movement_updater =", "y_range or [ floor(-config[\"frame_height\"] / 2), ceil(config[\"frame_height\"] / 2), ]", "The speed value scales the speed of this flow. time_width", "z * OUT)) self.set_opacity(self.opacity) def get_vector(self, point: np.ndarray): \"\"\"Creates a", "line.set_stroke(width=self.stroke_width, opacity=opacity) self.add(line) self.stream_lines = [*self.submobjects] def create( self, lag_ratio:", "update function to move a :class:`~.Mobject` along the vector field.", "func(p - shift_vector) @staticmethod def scale_func( func: Callable[[np.ndarray], np.ndarray], scalar:", "the vector field. The color at each position is calculated", "/ self.max_anchors_per_line)) line.set_points_smoothly(points[::step]) if self.single_color: line.set_stroke(self.color) else: if config[\"renderer\"] ==", "The speed value scales the speed of such a mobject.", "in the color gradient defined using `min_color_scheme_value`, `max_color_scheme_value` and `colors`.", "`max_color_scheme_value` and `colors`. min_color_scheme_value The value of the color_scheme function", "whole animation might be longer due to the `lag_ratio`. If", "scaled_alphas.astype(int) next_indices = np.clip(indices + 1, 0, len(rgbs) - 1)", "import AnimationGroup, Succession from ..animation.creation import Create from ..animation.indication import", ":class:`~.Vector` constructor \"\"\" output = np.array(self.func(point)) norm = np.linalg.norm(output) if", "*= len(self.rgbs) - 1 c1 = self.rgbs[int(alpha)] c2 = self.rgbs[min(int(alpha", "of the gradient. 
max_color_scheme_value The value of the color_scheme function", "= False, noise_factor: Optional[float] = None, n_repeats=1, # Determining how", "animation but eases out. dt = 1 / config[\"frame_rate\"] animations", "dt, substeps, pointwise) return self def get_nudge_updater( self, speed: float", "vector field. \"\"\" self.stop_submobject_movement() self.submob_movement_updater = lambda mob, dt: mob.nudge_submobjects(", "Callable[[float], float] = lambda norm: 0.45 * sigmoid(norm), opacity: float", "upon creation. max_anchors_per_line The maximum number of anchors per line.", "The function defining the rate of change at every position", "in the given point. Color and length fit the specifications", "flow_speed if line.time >= self.virtual_time: line.time -= self.virtual_time line.anim.interpolate(np.clip(line.time /", "1], y_range=[-4, 4, 1], length_func=lambda x: x / 2 )", "self.background_img = self.get_colored_background_image() if config[\"renderer\"] == \"opengl\": self.values_to_rgbas = self.get_vectorized_rgba_gradient_function(", "max_run_time = self.virtual_time / self.flow_speed creation_rate_func = ease_out_sine creation_staring_speed =", "at every position. The values of this functions is displayed", "return AnimationGroup(*animations) # TODO: Variant of StreamLines that is able", "class VectorField(VGroup): \"\"\"A vector field. Vector fields are based on", "Calling this method multiple times will result in removing the", "field. When used with :meth:`~.Mobject.add_updater`, the mobject will move along", "function. \"\"\" return lambda p: func(p - shift_vector) @staticmethod def", "the displayed size of vectors to reduce the clutter. opacity", "arguments to be passed to the :class:`~.VGroup` constructor \"\"\" def", "np.array(values)) alphas = np.clip(alphas, 0, 1) scaled_alphas = alphas *", "\"There is no point in generating an image if the", "end_animation(self) -> AnimationGroup: \"\"\"End the stream line animation smoothly. 
Returns", "by this method. Parameters ---------- speed The speed at which", "field, where its speed is determined by the magnitude of", "point is shifted in a single step. Returns ------- float", "vector field, where its speed is determined by the magnitude", "be passed to the :class:`~.Vector` constructor kwargs : Any Additional", "this functions is displayed as a grid of vectors. By", "the whole animation might be longer due to the `lag_ratio`.", "to be passed to the :class:`~.VGroup` constructor Examples -------- ..", "is determined by the magnitude of the vector field. Parameters", "= self.func(p + step_size * k_3) return step_size / 6.0", "of the vector field. substeps The amount of steps the", "float], Mobject] The update function. \"\"\" return lambda mob, dt:", "if three_dimensions or z_range: self.z_range = z_range or self.y_range.copy() self.ranges", "points.append(new_point) step = max_steps if not step: continue if config[\"renderer\"]", ") -> AnimationGroup: \"\"\"The creation animation of the stream lines.", "np.any(self.z_range != np.array([0, 0.5, 0.5])): line.set_stroke( [self.pos_to_color(p) for p in", "None: run_time = self.virtual_time if lag_ratio is None: lag_ratio =", "field. Calling this method multiple times will result in removing", "\"VectorField\": \"\"\"Apply a nudge along the vector field to all", "else: self.single_color = True self.color = color self.submob_movement_updater = None", "step_size A scalar that is used to determine how much", "time the agents get to move in the vector field.", "= [self.x_range, self.y_range] if three_dimensions or z_range: self.z_range = z_range", "> self.z_range[1] + self.padding - self.z_range[2] ) max_steps = ceil(virtual_time", "< self.z_range[0] - self.padding or p[2] > self.z_range[1] + self.padding", "colors defining the color gradient of the vector field. 
x_range", "2), ceil(config[\"frame_width\"] / 2), ] self.y_range = y_range or [", "True if z_range is set # Takes in actual norm,", "in colors]) def func(values, opacity=1): alphas = inverse_interpolate(start, end, np.array(values))", "the vector field, where its speed is determined by the", "== 0: mob.set_stroke(opacity=0) elif alpha == 1: mob.set_stroke(opacity=1) def finish_updater_cycle(line,", "with of the stream lines. opacity The opacity of the", "of this vector field. Parameters ---------- point The root point", "whole nudge is divided into. Higher values give more accurate", "agents get to move in the vector field. Higher values", "color of the vector field. If set, position-specific coloring is", "= 0, max_color_scheme_value: float = 2, colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS,", "5 colors = [RED, YELLOW, BLUE, DARK_GRAY] min_radius = Circle(radius=2,", "VectorField This vector field. \"\"\" self.remove_updater(self.submob_movement_updater) self.submob_movement_updater = None return", "opacity if vector_config is None: vector_config = {} self.vector_config =", "fewer anchors ) self.play(stream_lines.create()) # uses virtual_time as run_time self.wait()", "alphas = inverse_interpolate(start, end, np.array(values)) alphas = np.clip(alphas, 0, 1)", "Callable[[np.ndarray], np.ndarray], scalar: float, ) -> Callable[[np.ndarray], np.ndarray]: \"\"\"Scale a", "displayed by moving many agents along the vector field and", "np.arange(*self.x_range) y_range = np.arange(*self.y_range) z_range = np.arange(*self.z_range) for x, y,", "return lambda mob, dt: self.nudge(mob, dt * speed, pointwise=pointwise) def", "self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5) self.wait(stream_lines.virtual_time / stream_lines.flow_speed) \"\"\" for line in", "Returns ------- `Callable[[np.ndarray], np.ndarray]` The shifted vector field function. 
\"\"\"", "self.add(vector_field) self.wait() func = VectorField.scale_func(func, 0.5) self.play(vector_field.animate.become(ArrowVectorField(func))) self.wait() Returns -------", "self.single_color: self.background_img = self.get_colored_background_image() if config[\"renderer\"] == \"opengl\": self.values_to_rgbas =", "move other :class:`~.Mobject` s along the vector field. Parameters ----------", "running Examples -------- .. manim:: EndAnimation class EndAnimation(Scene): def construct(self):", "(result, np.full([len(result), 1], opacity)), axis=1, ) return result return func", "np.concatenate( (result, np.full([len(result), 1], opacity)), axis=1, ) return result return", "dt The factor by which the distance an agent moves", "moves per step is stretched. Lower values result in a", "-------- .. manim:: ContinuousMotion class ContinuousMotion(Scene): def construct(self): func =", "factor by which the distance an agent moves per step", "as np from colour import Color from PIL import Image", "3)) x_array = np.linspace(-fw / 2, fw / 2, pw)", "The values of this functions is displayed as a grid", "pointwise: bool = False, ) -> \"VectorField\": \"\"\"Apply a nudge", "of the vector field and is rooted in the given", "lines is used as run time. Returns ------- :class:`~.AnimationGroup` The", "ValueError( \"There is no point in generating an image if", "at the same speed # as the regular line flash", "for details. Returns ------- VectorField This vector field. \"\"\" self.stop_submobject_movement()", "2, -fh / 2, ph) x_array = x_array.reshape((1, len(x_array))) y_array", "Optional[Callable[[float], float]] = None, **kwargs ) -> AnimationGroup: \"\"\"The creation", "lambda p: func(p * scalar) def nudge( self, mob: Mobject,", "colors defining the color gradient of the vector field. 
kwargs", "bool = False, ) -> \"VectorField\": \"\"\"Start continuously moving all", "---------- dt A scalar to the amount the mobject is", "stream lines will continuously flow Parameters ---------- warm_up : bool,", "complexity, not in length. padding The distance agents can move", "Examples -------- .. manim:: ContinuousMotion class ContinuousMotion(Scene): def construct(self): func", "start_points: points = [point] for _ in range(max_steps): last_point =", "[self.pos_to_color(p) for p in line.get_anchors()], ) else: line.color_using_background_image(self.background_img) line.set_stroke(width=self.stroke_width, opacity=opacity)", "image if the vector field uses a single color.\", )", "), ) return AnimationGroup(*animations) # TODO: Variant of StreamLines that", "None. length_func The function determining the displayed size of the", "None: def color_scheme(p): return np.linalg.norm(p) self.color_scheme = color_scheme # TODO", "(y - half_noise) * UP + (z - half_noise) *", "steps: Calculate the vector field function at that position, map", "ceil(config[\"frame_width\"] / 2), ] self.y_range = y_range or [ floor(-config[\"frame_height\"]", "default the color of each vector is determined by it's", "* k_3 + k_4) step_size = dt / substeps for", "applied to the vector field. Examples -------- .. manim:: ScaleVectorFieldFunction", "0: mob.set_stroke(opacity=0) elif alpha == 1: mob.set_stroke(opacity=1) def finish_updater_cycle(line, alpha):", "lambda pos: np.sin(pos[1]) * RIGHT + np.cos(pos[0]) * UP vector_field", "line, run_time=run_time, rate_func=rate_func, time_width=time_width, **kwargs, ) line.anim.begin() line.time = random.random()", "moving many agents along the vector field and showing their", "raise ValueError( \"There is no point in generating an image", "x_array = x_array.repeat(ph, axis=0) y_array.repeat(pw, axis=1) # TODO why not", "AnimationGroup: \"\"\"End the stream line animation smoothly. 
Returns an animation", "= lambda pos: pos - LEFT * 5 colors =", "color_to_rgb, rgb_to_color from ..utils.deprecation import deprecated_params from ..utils.rate_functions import ease_out_sine,", "to compute. Returns ------- Image.Imgae The vector field image. \"\"\"", "1 / config[\"frame_rate\"] animations = [] self.remove_updater(self.flow_animation) self.flow_animation = None", "0, len(rgbs) - 1) inter_alphas = scaled_alphas % 1 inter_alphas", "positing through a series of steps: Calculate the vector field", "function at that position, map that vector to a single", "vf2 = ArrowVectorField(func, x_range=[-7, 7, 1], length_func=length_func) self.play(vf.animate.become(vf2)) self.wait() ..", "= inter_alphas.repeat(3).reshape((len(indices), 3)) result = interpolate(rgbs[indices], rgbs[next_indices], inter_alphas) result =", "the `VectorField`. color The color of the vector field. If", "-> \"VectorField\": \"\"\"Stops the continuous movement started using :meth:`start_submobject_movement`. Returns", "max_color_scheme_value, colors, **kwargs, ) self.length_func = length_func self.opacity = opacity", "an image that displays the vector field. The color at", "OUT)) self.set_opacity(self.opacity) def get_vector(self, point: np.ndarray): \"\"\"Creates a vector in", "= None, run_time: Optional[Callable[[float], float]] = None, **kwargs ) ->", "line shown while being animated rate_func The rate function of", "np.ndarray], color: Optional[Color] = None, color_scheme: Optional[Callable[[np.ndarray], float]] = None,", "The distance agents can move out of the generation area", "on a function defining the :class:`~.Vector` at every position. 
The", "Coloring :save_last_frame: class Coloring(Scene): def construct(self): func = lambda pos:", "return lambda p: func(p - shift_vector) @staticmethod def scale_func( func:", "else: line.color_using_background_image(self.background_img) line.set_stroke(width=self.stroke_width, opacity=opacity) self.add(line) self.stream_lines = [*self.submobjects] def create(", "EndAnimation class EndAnimation(Scene): def construct(self): func = lambda pos: np.sin(pos[0]", "single value using `self.color_scheme` and finally generate a color from", "a grid of vectors. By default the color of each", "vector field and is rooted in the given point. Color", "self.stream_lines = [*self.submobjects] def create( self, lag_ratio: Optional[float] = None,", "1]) self.add(vf) self.wait() length_func = lambda x: x / 3", "self.single_color = True self.color = color self.submob_movement_updater = None @staticmethod", "vec = self.func(pos) color_value = np.clip( self.color_scheme(vec), min_color_scheme_value, max_color_scheme_value, )", "\"\"\" def runge_kutta(self, p: Sequence[float], step_size: float) -> float: \"\"\"Returns", "k_3) return step_size / 6.0 * (k_1 + 2.0 *", ":meth:`get_nudge_updater` for details. pointwise Whether to move the mobject along", "AnimationGroup(*animations) # TODO: Variant of StreamLines that is able to", "gets simulated upon creation. max_anchors_per_line The maximum number of anchors", "self.stream_lines ] random.shuffle(animations) return AnimationGroup(*animations, lag_ratio=lag_ratio) def start_animation( self, warm_up=True,", "`min_color_scheme_value`, `max_color_scheme_value` and `colors`. min_color_scheme_value The value of the color_scheme", "but may take a long time to compute. 
Returns -------", "+ pos[1] * LEFT) - pos) / 3 self.add(ArrowVectorField(func)) ..", "Determining how lines are drawn dt=0.05, virtual_time=3, max_anchors_per_line=100, padding=3, #", "class StreamLines(VectorField): \"\"\"StreamLines represent the flow of a :class:`VectorField` using", ".. manim:: StreamLineCreation class StreamLineCreation(Scene): def construct(self): func = lambda", "+= dt * flow_speed if line.time >= self.virtual_time: line.time -=", ":meth:`nudge` for details. Returns ------- VectorField This vector field. \"\"\"", "padding The distance agents can move out of the generation", "Additional arguments to be passed to the :class:`~.Vector` constructor \"\"\"", "determining the displayed size of the vectors. The actual size", "func = lambda pos: np.sin(pos[0]) * UR + np.cos(pos[1]) *", "Lines with more anchors get reduced in complexity, not in", "pos / 5 stream_lines = StreamLines( func, x_range=[-3, 3, 0.2],", "* speed, pointwise=pointwise) def start_submobject_movement( self, speed: float = 1,", "point. Color and length fit the specifications of this vector", "flowing_area = Rectangle(width=8, height=6) labels = [Tex(\"Spawning Area\"), Tex(\"Flowing Area\").shift(DOWN", "= None, three_dimensions: bool = False, # Automatically True if", "steps the whole nudge is divided into. Higher values give", "field image. \"\"\" if self.single_color: raise ValueError( \"There is no", "of each stream line creation. run_time The run time of", "-------- .. manim:: EndAnimation class EndAnimation(Scene): def construct(self): func =", "/ 2) * UR + np.cos(pos[1] / 2) * LEFT", "of steps the whole nudge is divided into. Higher values", "/ 3 self.add(StreamLines(func)) .. 
manim:: SpawningAndFlowingArea :save_last_frame: class SpawningAndFlowingArea(Scene): def", "for line in self.stream_lines ] random.shuffle(animations) return AnimationGroup(*animations, lag_ratio=lag_ratio) def", "[point] for _ in range(max_steps): last_point = points[-1] new_point =", "lambda pos: rgb_to_color(self.pos_to_rgb(pos)) else: self.single_color = True self.color = color", "updater. The stream lines will continuously flow Parameters ---------- warm_up", "terminated. stroke_width The stroke with of the stream lines. opacity", "super().__init__(**kwargs) self.func = func if color is None: self.single_color =", "max_color_scheme_value The value of the color_scheme function to be mapped", "get_vector(self, point: np.ndarray): \"\"\"Creates a vector in the vector field.", "None, run_time: Optional[Callable[[float], float]] = None, **kwargs ) -> AnimationGroup:", "of change at every position of the `VectorField`. color The", "Determining Vector positions: x_range: Sequence[float] = None, y_range: Sequence[float] =", "= [BLUE_E, GREEN, YELLOW, RED] class VectorField(VGroup): \"\"\"A vector field.", "creation animation of the stream lines. The stream lines appear", "float = 2, colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, **kwargs ): super().__init__(**kwargs)", "= lambda pos: np.sin(pos[1]) * RIGHT + np.cos(pos[0]) * UP", "* OUT + self.noise_factor * np.random.random(3) for n in range(self.n_repeats)", "the clutter. opacity The opacity of the arrows. vector_config Additional", "in complexity, not in length. padding The distance agents can", "pos[1] * LEFT) - pos) / 3 self.add(StreamLines(func)) .. manim::", "`Callable[[np.ndarray], np.ndarray]` The scaled vector field function. \"\"\" return lambda", "this vector field. 
Parameters ---------- point The root point of", "height=6) labels = [Tex(\"Spawning Area\"), Tex(\"Flowing Area\").shift(DOWN * 2.5)] for", "color.\", ) ph = int(config[\"pixel_height\"] / sampling_rate) pw = int(config[\"pixel_width\"]", "is not None. noise_factor The amount by which the starting", "self.color_scheme(vec), min_color_scheme_value, max_color_scheme_value, ) alpha = inverse_interpolate( min_color_scheme_value, max_color_scheme_value, color_value,", "\"VectorField\": \"\"\"Nudge a :class:`~.Mobject` along the vector field. Parameters ----------", "to move the submobjects. See :meth:`get_nudge_updater` for details. pointwise Whether", "+ step_size * (k_2 * 0.5)) k_4 = self.func(p +", "lag_ratio is None: lag_ratio = run_time / 2 / len(self.submobjects)", "The animation fading out the running stream animation. Raises ------", "None return self def get_colored_background_image(self, sampling_rate: int = 5) ->", "/ 4.0) norms = np.array( [np.linalg.norm(self.func(point)) for point in line.points],", "float = 2, colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, # Determining stream", "1.0, vector_config: Optional[dict] = None, **kwargs ): self.x_range = x_range", "..utils.color import BLUE_E, GREEN, RED, YELLOW, color_to_rgb, rgb_to_color from ..utils.deprecation", ":func:`~.inverse_interpolate` end end value used for inverse interpolation at :func:`~.inverse_interpolate`", "Examples -------- .. manim:: Nudging class Nudging(Scene): def construct(self): func", "why not y_array = y_array.repeat(...)? points_array[:, :, 0] = x_array", "in random order. Parameters ---------- lag_ratio The lag ratio of", "for compatibility with cairo line.set_stroke(width=self.stroke_width / 4.0) norms = np.array(", "outside_box(p): return ( p[0] < self.x_range[0] - self.padding or p[0]", "min_color_scheme_value The value of the color_scheme function to be mapped", "color of the gradient. 
colors The colors defining the color", "interpolate(c1, c2, alpha) self.pos_to_rgb = pos_to_rgb self.pos_to_color = lambda pos:", "self.submob_movement_updater = lambda mob, dt: mob.nudge_submobjects( dt * speed, pointwise=pointwise,", "/ 2) * LEFT stream_lines = StreamLines( func, stroke_width=3, max_anchors_per_line=5,", "\"opengl\": # scaled for compatibility with cairo line.set_stroke(width=self.stroke_width / 4.0)", "vector field. If set, position-specific coloring is disabled. color_scheme A", "self.padding - self.x_range[2] or p[1] < self.y_range[0] - self.padding or", "* (k_1 + 2.0 * k_2 + 2.0 * k_3", "LEFT vf = ArrowVectorField(func, x_range=[-7, 7, 1]) self.add(vf) self.wait() length_func", "-------- .. manim:: Nudging class Nudging(Scene): def construct(self): func =", "= [*self.submobjects] def create( self, lag_ratio: Optional[float] = None, run_time:", "z_max, delta_z three_dimensions Enables three_dimensions. Default set to False, automatically", "* LEFT) - pos) / 3 self.add(ArrowVectorField(func)) .. manim:: SizingAndSpacing", "np.clip( self.color_scheme(vec), min_color_scheme_value, max_color_scheme_value, ) alpha = inverse_interpolate( min_color_scheme_value, max_color_scheme_value,", "out the running stream animation. Raises ------ ValueError if no", "------- VectorField This vector field. Examples -------- .. manim:: Nudging", "vector field. If `False` the vector field takes effect on", "of vectors. By default the color of each vector is", "is determined by it's magnitude. Other color schemes can be", "line animation is running Examples -------- .. manim:: EndAnimation class", "colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, # Determining stream line starting positions:", "inverse_interpolate from ..utils.color import BLUE_E, GREEN, RED, YELLOW, color_to_rgb, rgb_to_color", "+ self.noise_factor * np.random.random(3) for n in range(self.n_repeats) for x", "Examples -------- .. 
manim:: ScaleVectorFieldFunction class ScaleVectorFieldFunction(Scene): def construct(self): func", "..utils.simple_functions import sigmoid from .types.opengl_vectorized_mobject import OpenGLVMobject DEFAULT_SCALAR_FIELD_COLORS: list =", "manim:: Nudging class Nudging(Scene): def construct(self): func = lambda pos:", "disabled. color_scheme A function mapping a vector to a single", "displayed size of vectors to reduce the clutter. opacity The", ":class:`~.AnimationGroup` The creation animation of the stream lines. Examples --------", "construct(self): func = lambda pos: (pos[0] * UR + pos[1]", "\"\"\" return lambda p: func(p - shift_vector) @staticmethod def scale_func(", "and length fit the specifications of this vector field. Parameters", "---------- mob The mobject to move along the vector field", "noticeable cut. Returns ------- :class:`~.AnimationGroup` The animation fading out the", "super().__init__( func, color, color_scheme, min_color_scheme_value, max_color_scheme_value, colors, **kwargs, ) self.length_func", "Enables three_dimensions. Default set to False, automatically turns True if", "field function. \"\"\" return lambda p: func(p - shift_vector) @staticmethod", "start the animation before fading it out.\") def hide_and_wait(mob, alpha):", "last color of the gradient. colors The colors defining the", "z_min, z_max, delta_z three_dimensions Enables three_dimensions. 
Default set to False,", "self.pos_to_rgb = pos_to_rgb self.pos_to_color = lambda pos: rgb_to_color(self.pos_to_rgb(pos)) else: self.single_color", "lambda mob, dt: self.nudge(mob, dt * speed, pointwise=pointwise) def start_submobject_movement(", "self.z_range[1] + self.padding - self.z_range[2] ) max_steps = ceil(virtual_time /", "False if color_scheme is None: def color_scheme(p): return np.linalg.norm(p) self.color_scheme", "2), ] self.ranges = [self.x_range, self.y_range] if three_dimensions or z_range:", "the distance a mobject moves per second is equal to", "for _ in range(max_steps): last_point = points[-1] new_point = last_point", "length_func=lambda x: x / 2 ) self.add(vector_field) circle = Circle(radius=2).shift(LEFT)", "Returns ------- `Callable[[np.ndarray], np.ndarray]` The scaled vector field function. \"\"\"", "Vector fields are always based on a function defining the", "False, automatically turns True if z_range is not None. noise_factor", "..mobject.types.vectorized_mobject import VGroup, VMobject from ..utils.bezier import interpolate, inverse_interpolate from", "speed The speed at which to move the submobjects. See", "s along the vector field. Parameters ---------- func The function", "2, points_array) return Image.fromarray((rgbs * 255).astype(\"uint8\")) def get_vectorized_rgba_gradient_function( self, start:", "to the vector field. Returns ------- `Callable[[np.ndarray], np.ndarray]` The shifted", "the points of the individual points of the :class:`~.Mobject`, potentially", "the previous updater created by this method. Parameters ---------- speed", "line. Lines with more anchors get reduced in complexity, not", "flow_speed At `flow_speed=1` the distance the flow moves per second", "constructor \"\"\" def __init__( self, func: Callable[[np.ndarray], np.ndarray], color: Optional[Color]", "a vector field. shift_vector The scalar to be applied to", "Otherwise it starts with all lines shown. 
flow_speed At `flow_speed=1`", "False, noise_factor: Optional[float] = None, n_repeats=1, # Determining how lines", "4.0) norms = np.array( [np.linalg.norm(self.func(point)) for point in line.points], )", "Returns an animation resulting in fully displayed stream lines without", "func = lambda pos: np.sin(pos[1] / 2) * RIGHT +", "dt * flow_speed if line.time >= self.virtual_time: line.time -= self.virtual_time", "virtual_time self.max_anchors_per_line = max_anchors_per_line self.padding = padding self.stroke_width = stroke_width", "= np.arange(*self.x_range) y_range = np.arange(*self.y_range) z_range = np.arange(*self.z_range) for x,", "k_1 = self.func(p) k_2 = self.func(p + step_size * (k_1", "result in longer stream lines. However, this whole time gets", ".. manim:: SpawningAndFlowingArea :save_last_frame: class SpawningAndFlowingArea(Scene): def construct(self): func =", "in np.arange(*self.y_range) for z in np.arange(*self.z_range) ], ) def outside_box(p):", "x_array.reshape((1, len(x_array))) y_array = y_array.reshape((len(y_array), 1)) x_array = x_array.repeat(ph, axis=0)", "interpolate, inverse_interpolate from ..utils.color import BLUE_E, GREEN, RED, YELLOW, color_to_rgb,", "of the generation area before being terminated. stroke_width The stroke", "ceil(virtual_time / dt) + 1 if not self.single_color: self.background_img =", "If undefined, it will be selected so that the total", "import Color from PIL import Image from .. import config", "updater created by this method. Parameters ---------- speed The speed", "used Examples -------- .. manim:: ContinuousMotion class ContinuousMotion(Scene): def construct(self):", "Determining stream line appearance: stroke_width=1, opacity=1, **kwargs ): self.x_range =", "field function at that position, map that vector to a", "function defining a vector field. shift_vector The scalar to be", "is None: lag_ratio = run_time / 2 / len(self.submobjects) animations", "vector field. 
step_size A scalar that is used to determine", "np from colour import Color from PIL import Image from", "itertools as it import random from math import ceil, floor", "self.add(self.get_vector(x * RIGHT + y * UP + z *", "function of each stream line flashing line_animation_class The animation class", "* RIGHT + (y - half_noise) * UP + (z", "If set, position-specific coloring is disabled. color_scheme A function mapping", "kwargs : Any Additional arguments to be passed to the", "to determine how much a point is shifted in a", "to the first color in `colors`. Lower values also result", "StreamLineCreation(Scene): def construct(self): func = lambda pos: (pos[0] * UR", "while being animated rate_func The rate function of each stream", "PIL import Image from .. import config from ..animation.composition import", "= self.func(p + step_size * (k_1 * 0.5)) k_3 =", "flow moves per second is equal to the magnitude of", ") alpha = inverse_interpolate( min_color_scheme_value, max_color_scheme_value, color_value, ) alpha *=", "x_range=[-7, 7, 1], length_func=length_func) self.play(vf.animate.become(vf2)) self.wait() .. manim:: Coloring :save_last_frame:", "spawning_area = Rectangle(width=6, height=4) flowing_area = Rectangle(width=8, height=6) labels =", "= 2, colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, **kwargs ): super().__init__(**kwargs) self.func", "in the vector field. The created vector is based on", "the animation. If undefined, it will be selected so that", "= dt / substeps for _ in range(substeps): if pointwise:", "fit the specifications of this vector field. Parameters ---------- point", "stroke_width=3, max_anchors_per_line=5, virtual_time=1, color=BLUE ) self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5, time_width=0.5) self.wait(1)", "length fit the specifications of this vector field. Parameters ----------", "set to False, automatically turns True if z_range is not", "change vectors. 
Vector fields are always based on a function", "if z_range is set # Takes in actual norm, spits", "generate the gradients as numpy arrays representing rgba values \"\"\"", "scaled for compatibility with cairo line.set_stroke(width=self.stroke_width / 4.0) norms =", "stream lines is used as run time. Returns ------- :class:`~.AnimationGroup`", "vector_field.nudge(circle, -2, 60, True) vector_field.nudge(dot, -2, 60) circle.add_updater(vector_field.get_nudge_updater(pointwise=True)) dot.add_updater(vector_field.get_nudge_updater()) self.add(circle,", "much the point is shifted. \"\"\" k_1 = self.func(p) k_2", "scalar: float, ) -> Callable[[np.ndarray], np.ndarray]: \"\"\"Scale a vector field", "creation. The runtime of the whole animation might be longer", "number of agents generated at each starting point. dt The", "Parameters ---------- speed The speed at which to move the", "np.sin(pos[1] / 2) * RIGHT + np.cos(pos[0] / 2) *", "the position in the color gradient defined using `min_color_scheme_value`, `max_color_scheme_value`", "point of the vector. kwargs : Any Additional arguments to", "def construct(self): func = lambda pos: np.sin(pos[1]) * RIGHT +", "\"\"\"StreamLines represent the flow of a :class:`VectorField` using the trace", "determined by it's magnitude. Other color schemes can be used", "= StreamLines( func, stroke_width=3, max_anchors_per_line=5, virtual_time=1, color=BLUE ) self.add(stream_lines) stream_lines.start_animation(warm_up=False,", "= 2, colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, # Determining stream line", "2) * LEFT vf = ArrowVectorField(func, x_range=[-7, 7, 1]) self.add(vf)", "included in the image. 
Lower values give more accurate results,", "Returns ------- function to generate the gradients as numpy arrays", "Optional[dict] = None, **kwargs ): self.x_range = x_range or [", "= points[-1] new_point = last_point + dt * func(last_point) if", "construct(self): func = lambda pos: np.sin(pos[1]) * RIGHT + np.cos(pos[0])", "self.y_range[1] + self.padding - self.y_range[2] or p[2] < self.z_range[0] -", "!= 0: output *= self.length_func(norm) / norm vect = Vector(output,", "= y_array rgbs = np.apply_along_axis(self.pos_to_rgb, 2, points_array) return Image.fromarray((rgbs *", "== \"opengl\": # scaled for compatibility with cairo line.set_stroke(width=self.stroke_width /", "position of a point along a vector field. Parameters ----------", "self.submobjects: self.nudge(mob, dt, substeps, pointwise) return self def get_nudge_updater( self,", "Examples -------- .. manim:: BasicUsage :save_last_frame: class BasicUsage(Scene): def construct(self):", "/ 2 / len(self.submobjects) animations = [ Create(line, run_time=run_time, **kwargs)", "\"\"\" for mob in self.submobjects: self.nudge(mob, dt, substeps, pointwise) return", "line.time >= self.virtual_time: line.time -= self.virtual_time line.anim.interpolate(np.clip(line.time / line.anim.run_time, 0,", "to be passed to the :class:`~.VGroup` constructor \"\"\" def __init__(", "\"\"\" if run_time is None: run_time = self.virtual_time if lag_ratio", "pos: rgb_to_color(self.pos_to_rgb(pos)) else: self.single_color = True self.color = color self.submob_movement_updater", "position. The values of this functions is displayed as a", "..animation.update import UpdateFromAlphaFunc from ..constants import OUT, RIGHT, UP from", "alpha == 0: mob.set_stroke(opacity=0) elif alpha == 1: mob.set_stroke(opacity=1) def", "YELLOW, RED] class VectorField(VGroup): \"\"\"A vector field. 
Vector fields are", "# Determining stream line starting positions: x_range: Sequence[float] = None,", "float, float]: vec = self.func(pos) color_value = np.clip( self.color_scheme(vec), min_color_scheme_value,", "the vector field. When used with :meth:`~.Mobject.add_updater`, the mobject will", "in self.stream_lines: create = Create( line, run_time=creation_run_time, rate_func=creation_rate_func, ) if", "= config[\"frame_width\"] fh = config[\"frame_height\"] points_array = np.zeros((ph, pw, 3))", "self.play(vector_field.animate.become(ArrowVectorField(func))) self.wait() Returns ------- `Callable[[np.ndarray], np.ndarray]` The scaled vector field", "+ k_4) step_size = dt / substeps for _ in", "the vector field function at that position, map that vector", "2, pw) y_array = np.linspace(fh / 2, -fh / 2,", "lbl.add_background_rectangle(opacity=0.6, buff=0.05) self.add(stream_lines, spawning_area, flowing_area, *labels) \"\"\" def __init__( self,", "2, 0.2], padding=1 ) spawning_area = Rectangle(width=6, height=4) flowing_area =", "**kwargs, ) self.noise_factor = ( noise_factor if noise_factor is not", "step_size)) else: mob.shift(runge_kutta(self, mob.get_center(), step_size)) return self def nudge_submobjects( self,", "The color at each position is calculated by passing the", "self.func = func x_range = np.arange(*self.x_range) y_range = np.arange(*self.y_range) z_range", "that the total animation length is 1.5 times the run", "speed of this flow. time_width The proportion of the stream", "field. 
Parameters ---------- p The position of each point being", "provides methods to move other :class:`~.Mobject` s along the vector", ":class:`~.Vector` constructor kwargs : Any Additional arguments to be passed", "None, min_color_scheme_value: float = 0, max_color_scheme_value: float = 2, colors:", "7, 1]) self.add(vf) self.wait() length_func = lambda x: x /", "inverse_interpolate( min_color_scheme_value, max_color_scheme_value, color_value, ) alpha *= len(self.rgbs) - 1", "the vector field. See :meth:`nudge` for details. Returns ------- VectorField", "three_dimensions or z_range: self.z_range = z_range or self.y_range.copy() self.ranges +=", "p in line.get_anchors()], ) else: line.color_using_background_image(self.background_img) line.set_stroke(width=self.stroke_width, opacity=opacity) self.add(line) self.stream_lines", "False, ) -> \"VectorField\": \"\"\"Apply a nudge along the vector", "in start_points: points = [point] for _ in range(max_steps): last_point", "scaled vector field function. \"\"\" return lambda p: func(p *", "field. See :meth:`nudge` for details. Returns ------- VectorField This vector", "on the magnitude of the vector field. substeps The amount", "OUT + self.noise_factor * np.random.random(3) for n in range(self.n_repeats) for", "float = 0, max_color_scheme_value: float = 2, colors: Sequence[Color] =", "Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, **kwargs ): super().__init__(**kwargs) self.func = func if", "/ 3 vf2 = ArrowVectorField(func, x_range=[-7, 7, 1], length_func=length_func) self.play(vf.animate.become(vf2))", "is not None. length_func The function determining the displayed size", "vector field. \"\"\" for mob in self.submobjects: self.nudge(mob, dt, substeps,", "represented by a set of change vectors. Vector fields are", "or z_range: self.z_range = z_range or self.y_range.copy() self.ranges += [self.z_range]", "field. Examples -------- .. 
manim:: ScaleVectorFieldFunction class ScaleVectorFieldFunction(Scene): def construct(self):", "= [point] for _ in range(max_steps): last_point = points[-1] new_point", "each agent is altered along each axis. Defaults to :code:`delta_y", "points of the individual points of the :class:`~.Mobject`, potentially distorting", "lambda p: func(p - shift_vector) @staticmethod def scale_func( func: Callable[[np.ndarray],", "[BLUE_E, GREEN, YELLOW, RED] class VectorField(VGroup): \"\"\"A vector field. Vector", "colors]) def func(values, opacity=1): alphas = inverse_interpolate(start, end, np.array(values)) alphas", ") alpha *= len(self.rgbs) - 1 c1 = self.rgbs[int(alpha)] c2", "stream lines appear in random order. Parameters ---------- lag_ratio The", ") return AnimationGroup(*animations) # TODO: Variant of StreamLines that is", "path. The speed value scales the speed of such a", ") -> \"VectorField\": \"\"\"Apply a nudge along the vector field", "max_run_time - line.time / self.flow_speed animations.append( Succession( UpdateFromAlphaFunc( line, finish_updater_cycle,", "run_time=-line.time / self.flow_speed, ), create, ), ) self.remove(line.anim.mobject) line.anim.finish() else:", "-> \"VectorField\": \"\"\"Start continuously moving all submobjects along the vector", "lines. The stream lines appear in random order. Parameters ----------", "---------- lag_ratio The lag ratio of the animation. If undefined,", "from that value using the color gradient. Parameters ---------- sampling_rate", "if self.flow_animation is None: raise ValueError(\"You have to start the", "np.apply_along_axis(self.pos_to_rgb, 2, points_array) return Image.fromarray((rgbs * 255).astype(\"uint8\")) def get_vectorized_rgba_gradient_function( self,", "The number of agents generated at each starting point. dt", "* np.random.random(3) for n in range(self.n_repeats) for x in np.arange(*self.x_range)", "appear in random order. 
Parameters ---------- lag_ratio The lag ratio", "Circle(radius=10, color=colors[-1]).shift(LEFT * 5) vf = ArrowVectorField( func, min_color_scheme_value=2, max_color_scheme_value=10,", "to the amount the mobject is moved along the vector", "* 5) max_radius = Circle(radius=10, color=colors[-1]).shift(LEFT * 5) vf =", "3)) result = interpolate(rgbs[indices], rgbs[next_indices], inter_alphas) result = np.concatenate( (result,", "for p in line.get_anchors()], ) else: line.color_using_background_image(self.background_img) line.set_stroke(width=self.stroke_width, opacity=opacity) self.add(line)", "= 1, pointwise: bool = False, ) -> \"VectorField\": \"\"\"Apply", "lines max_anchors_per_line=5, # better performance with fewer anchors ) self.play(stream_lines.create())", "The animation class being used Examples -------- .. manim:: ContinuousMotion", "move the submobjects. See :meth:`get_nudge_updater` for details. pointwise Whether to", "the color_scheme function to be mapped to the last color", "update function. \"\"\" return lambda mob, dt: self.nudge(mob, dt *", "values \"\"\" rgbs = np.array([color_to_rgb(c) for c in colors]) def", "rgbs = np.array([color_to_rgb(c) for c in colors]) def func(values, opacity=1):", "line. Otherwise it starts with all lines shown. flow_speed At", "a single value using `self.color_scheme` and finally generate a color", "for point in start_points: points = [point] for _ in", "uses virtual_time as run_time self.wait() \"\"\" if run_time is None:", "the vector field takes effect on the center of the", "return step_size / 6.0 * (k_1 + 2.0 * k_2", "= False, # Automatically True if z_range is set #", "fully displayed stream lines without a noticeable cut. Returns -------", ":func:`~.inverse_interpolate` colors list of colors to generate the gradient Returns", "vector field. virtual_time The time the agents get to move", "area before being terminated. 
stroke_width The stroke with of the", "function defining the rate of change at every position of", "Area\"), Tex(\"Flowing Area\").shift(DOWN * 2.5)] for lbl in labels: lbl.add_background_rectangle(opacity=0.6,", "from ..utils.simple_functions import sigmoid from .types.opengl_vectorized_mobject import OpenGLVMobject DEFAULT_SCALAR_FIELD_COLORS: list", "alphas * (len(rgbs) - 1) indices = scaled_alphas.astype(int) next_indices =", "opacity The opacity of the arrows. vector_config Additional arguments to", "ShowPassingFlash, **kwargs ) -> None: \"\"\"Animates the stream lines using", "in removing the previous updater created by this method. Parameters", "..mobject.geometry import Vector from ..mobject.mobject import Mobject from ..mobject.types.vectorized_mobject import", "line.time <= 0: animations.append( Succession( UpdateFromAlphaFunc( line, hide_and_wait, run_time=-line.time /", "= n_repeats self.virtual_time = virtual_time self.max_anchors_per_line = max_anchors_per_line self.padding =", "The position of each point being moved along the vector", "fields are always based on a function defining the vector", "the same speed # as the regular line flash animation", "self def stop_submobject_movement(self) -> \"VectorField\": \"\"\"Stops the continuous movement started", "= max_anchors_per_line self.padding = padding self.stroke_width = stroke_width half_noise =", "self.x_range[0] - self.padding or p[0] > self.x_range[1] + self.padding -", "Additional arguments to be passed to the :class:`~.VGroup` constructor \"\"\"", "3 vf2 = ArrowVectorField(func, x_range=[-7, 7, 1], length_func=length_func) self.play(vf.animate.become(vf2)) self.wait()", "< self.y_range[0] - self.padding or p[1] > self.y_range[1] + self.padding", "updater(mob, dt): for line in mob.stream_lines: line.time += dt *", ") return result return func class ArrowVectorField(VectorField): \"\"\"A :class:`VectorField` represented", "vector field. Examples -------- .. 
manim:: Nudging class Nudging(Scene): def", "inverse interpolation at :func:`~.inverse_interpolate` end end value used for inverse", "# scaled for compatibility with cairo line.set_stroke(width=self.stroke_width / 4.0) norms", "are drawn dt=0.05, virtual_time=3, max_anchors_per_line=100, padding=3, # Determining stream line", "= lambda x: x / 3 vf2 = ArrowVectorField(func, x_range=[-7,", "None, color_scheme: Optional[Callable[[np.ndarray], float]] = None, min_color_scheme_value: float = 0,", "step is stretched. Lower values result in a better approximation", "None: lag_ratio = run_time / 2 / len(self.submobjects) animations =", "np.cos(pos[1] / 2) * LEFT stream_lines = StreamLines( func, stroke_width=3,", "starting positions: x_range: Sequence[float] = None, y_range: Sequence[float] = None,", "which pixels get included in the image. Lower values give", "None: \"\"\"Animates the stream lines using an updater. The stream", "the distance the flow moves per second is equal to", "defined using `min_color_scheme_value`, `max_color_scheme_value` and `colors`. min_color_scheme_value The value of", "by moving many agents along the vector field and showing", "vector field function. \"\"\" return lambda p: func(p - shift_vector)", "get_nudge_updater( self, speed: float = 1, pointwise: bool = False,", "arguments to be passed to the :class:`~.Vector` constructor kwargs :", "continuously flow Parameters ---------- warm_up : bool, optional If `True`", "Higher values also result in the last color of the", "rgba values \"\"\" rgbs = np.array([color_to_rgb(c) for c in colors])", "self.y_range[2] / 2 ) self.n_repeats = n_repeats self.virtual_time = virtual_time", "If `True` the vector field takes effect on the points", "last_point + dt * func(last_point) if outside_box(new_point): break points.append(new_point) step", "+ self.time_width) * creation_staring_speed ) # creation_run_time is calculated so", "Returns ------- Image.Imgae The vector field image. 
\"\"\" if self.single_color:", "is initialized line by line. Otherwise it starts with all", "+ (y - half_noise) * UP + (z - half_noise)", "2) * UR + np.cos(pos[1] / 2) * LEFT stream_lines", "much a point is shifted in a single step. Returns", "\"\"\"Mobjects representing vector fields.\"\"\" __all__ = [ \"VectorField\", \"ArrowVectorField\", \"StreamLines\",", "field and is rooted in the given point. Color and", "line.anim.finish() else: remaining_time = max_run_time - line.time / self.flow_speed animations.append(", "created by this method. Parameters ---------- speed The speed at", "vector field. Parameters ---------- point The root point of the", "self.y_range[2] or p[2] < self.z_range[0] - self.padding or p[2] >", "= z_range or self.y_range.copy() self.ranges += [self.z_range] else: self.ranges +=", "= self.rgbs[int(alpha)] c2 = self.rgbs[min(int(alpha + 1), len(self.rgbs) - 1)]", "\"\"\" if self.flow_animation is None: raise ValueError(\"You have to start", "of x_min, x_max, delta_x y_range A sequence of y_min, y_max,", "color gradient. Parameters ---------- sampling_rate The stepsize at which pixels", "= StreamLines( func, x_range=[-3, 3, 0.2], y_range=[-2, 2, 0.2], padding=1", "line.time = random.random() * self.virtual_time if warm_up: line.time *= -1", "func: Callable[[np.ndarray], np.ndarray], scalar: float, ) -> Callable[[np.ndarray], np.ndarray]: \"\"\"Scale", "np.arange(*self.y_range) for z in np.arange(*self.z_range) ], ) def outside_box(p): return", "float]] = None, min_color_scheme_value: float = 0, max_color_scheme_value: float =", "the vector. By default this is used to cap the", "the vector field. kwargs : Any Additional arguments to be", "its path. The speed value scales the speed of this", "self.noise_factor * np.random.random(3) for n in range(self.n_repeats) for x in", "If `True` the animation is initialized line by line. 
Otherwise", "lambda mob, dt: mob.nudge_submobjects( dt * speed, pointwise=pointwise, ) self.add_updater(self.submob_movement_updater)", "length_func The function determining the displayed size of the vectors.", "= ceil(virtual_time / dt) + 1 if not self.single_color: self.background_img", "vector is based on the function of the vector field", "gradient defined using `min_color_scheme_value`, `max_color_scheme_value` and `colors`. min_color_scheme_value The value", "line.set_stroke(self.color) else: if config[\"renderer\"] == \"opengl\": # scaled for compatibility", "virtual time of the stream lines is used as run", "@staticmethod def scale_func( func: Callable[[np.ndarray], np.ndarray], scalar: float, ) ->", "the starting position of each agent is altered along each", "animations.append( Succession( UpdateFromAlphaFunc( line, hide_and_wait, run_time=-line.time / self.flow_speed, ), create,", "VectorField(VGroup): \"\"\"A vector field. Vector fields are based on a", "along the vector field and showing their trace. Parameters ----------", "ArrowVectorField( func, min_color_scheme_value=2, max_color_scheme_value=10, colors=colors ) self.add(vf, min_radius, max_radius) \"\"\"", "The value of the color_scheme function to be mapped to", "return Image.fromarray((rgbs * 255).astype(\"uint8\")) def get_vectorized_rgba_gradient_function( self, start: float, end:", "min_color_scheme_value=2, max_color_scheme_value=10, colors=colors ) self.add(vf, min_radius, max_radius) \"\"\" def __init__(", "vector_field = ArrowVectorField(func) self.add(vector_field) self.wait() func = VectorField.scale_func(func, 0.5) self.play(vector_field.animate.become(ArrowVectorField(func)))", ") self.n_repeats = n_repeats self.virtual_time = virtual_time self.max_anchors_per_line = max_anchors_per_line", "defining a vector field. 
shift_vector The scalar to be applied", "outside_box(new_point): break points.append(new_point) step = max_steps if not step: continue", "False, ) -> \"VectorField\": \"\"\"Start continuously moving all submobjects along", "along a vector field. Parameters ---------- p The position of", "if self.single_color: vect.set_color(self.color) else: vect.set_color(self.pos_to_color(point)) return vect class StreamLines(VectorField): \"\"\"StreamLines", "ScaleVectorFieldFunction class ScaleVectorFieldFunction(Scene): def construct(self): func = lambda pos: np.sin(pos[1])", "on a function defining a vector at every position. This", "details. Returns ------- VectorField This vector field. \"\"\" self.stop_submobject_movement() self.submob_movement_updater", "along the vector field. Calling this method multiple times will", "+ 2.0 * k_2 + 2.0 * k_3 + k_4)", "*= self.length_func(norm) / norm vect = Vector(output, **self.vector_config) vect.shift(point) if", "per second is equal to the magnitude of the vector", "animation resulting in fully displayed stream lines without a noticeable", "vector field. Parameters ---------- func The function defining the rate", "creation_staring_speed ) # creation_run_time is calculated so that the creation", "magnitude. Other color schemes can be used however. Parameters ----------", "Image from .. import config from ..animation.composition import AnimationGroup, Succession", "1, pointwise: bool = False, ) -> \"VectorField\": \"\"\"Start continuously", "return np.linalg.norm(p) self.color_scheme = color_scheme # TODO maybe other default", "typing import Callable, Iterable, Optional, Sequence, Tuple, Type import numpy", "The actual size of the vector is passed, the returned", "k_4 = self.func(p + step_size * k_3) return step_size /", "not y_array = y_array.repeat(...)? points_array[:, :, 0] = x_array points_array[:,", "in the last color of the gradient. 
colors The colors", "anchors ) self.play(stream_lines.create()) # uses virtual_time as run_time self.wait() \"\"\"", "ContinuousMotion(Scene): def construct(self): func = lambda pos: np.sin(pos[0] / 2)", "creation_rate_func(0.001) * 1000 creation_run_time = ( max_run_time / (1 +", "= np.array([color_to_rgb(c) for c in colors]) def func(values, opacity=1): alphas", "(z - half_noise) * OUT + self.noise_factor * np.random.random(3) for", "p The position of each point being moved along the", "n in range(self.n_repeats) for x in np.arange(*self.x_range) for y in", "sequence of x_min, x_max, delta_x y_range A sequence of y_min,", "self.values_to_rgbas = self.get_vectorized_rgba_gradient_function( min_color_scheme_value, max_color_scheme_value, colors, ) for point in", "max_anchors_per_line=100, padding=3, # Determining stream line appearance: stroke_width=1, opacity=1, **kwargs", "dt: mob.nudge_submobjects( dt * speed, pointwise=pointwise, ) self.add_updater(self.submob_movement_updater) return self", "the gradient Returns ------- function to generate the gradients as", "y, z in it.product(x_range, y_range, z_range): self.add(self.get_vector(x * RIGHT +", "self.add(stream_lines, spawning_area, flowing_area, *labels) \"\"\" def __init__( self, func: Callable[[np.ndarray],", "constructor Examples -------- .. manim:: BasicUsage :save_last_frame: class BasicUsage(Scene): def", "no point in generating an image if the vector field", "0, 1)) self.add_updater(updater) self.flow_animation = updater self.flow_speed = flow_speed self.time_width", "from ..utils.bezier import interpolate, inverse_interpolate from ..utils.color import BLUE_E, GREEN,", "along its path. The speed value scales the speed of", "self.noise_factor / 2 np.random.seed(0) start_points = np.array( [ (x -", "- self.y_range[2] or p[2] < self.z_range[0] - self.padding or p[2]", "takes effect on the center of the given :class:`~.Mobject`. 
If", "create, ), ) return AnimationGroup(*animations) # TODO: Variant of StreamLines", "more accurate approximations. pointwise Whether to move the mobject along", "BLUE_E, GREEN, RED, YELLOW, color_to_rgb, rgb_to_color from ..utils.deprecation import deprecated_params", "agents generated at each starting point. dt The factor by", "self, func: Callable[[np.ndarray], np.ndarray], color: Optional[Color] = None, color_scheme: Optional[Callable[[np.ndarray],", "color gradient of the vector field. x_range A sequence of", "stroke_width The stroke with of the stream lines. opacity The", "= Rectangle(width=6, height=4) flowing_area = Rectangle(width=8, height=6) labels = [Tex(\"Spawning", "is passed, the returned value will be used as display", "a vector field. Parameters ---------- p The position of each", ") -> Callable[[np.ndarray], np.ndarray]: \"\"\"Scale a vector field function. Parameters", "finish_updater_cycle(line, alpha): line.time += dt * self.flow_speed line.anim.interpolate(min(line.time / line.anim.run_time,", "OUT, RIGHT, UP from ..mobject.geometry import Vector from ..mobject.mobject import", "selected so that the total animation length is 1.5 times", "of the stream lines. Examples -------- .. manim:: StreamLineCreation class", "per step is stretched. Lower values result in a better", "of every single stream line creation. The runtime of the", "as a numpy array Parameters ---------- start start value used", "The vector field image. \"\"\" if self.single_color: raise ValueError( \"There", "and finally generate a color from that value using the", "along each axis. Defaults to :code:`delta_y / 2` if not", "if len(self.ranges[i]) == 2: self.ranges[i] += [0.5] self.ranges[i][1] += self.ranges[i][2]", "= self.get_vectorized_rgba_gradient_function( min_color_scheme_value, max_color_scheme_value, colors, ) for point in start_points:", "it's magnitude. Other color schemes can be used however. 
Parameters", ") max_steps = ceil(virtual_time / dt) + 1 if not", "nudge along the vector field to all submobjects. Parameters ----------", "removing the previous updater created by this method. Parameters ----------", "position, map that vector to a single value using `self.color_scheme`", "generate a color from that value using the color gradient.", "StreamLines(func, stroke_width=3, max_anchors_per_line=30) self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5) self.wait(stream_lines.virtual_time / stream_lines.flow_speed) \"\"\"", "ValueError if no stream line animation is running Examples --------", "np.ndarray) -> Tuple[float, float, float, float]: vec = self.func(pos) color_value", "sequence of z_min, z_max, delta_z three_dimensions Enables three_dimensions. Default set", "by the magnitude of the vector field. Parameters ---------- speed", "of the :class:`~.Mobject`, potentially distorting it. Returns ------- VectorField This", "4, 1], length_func=lambda x: x / 2 ) self.add(vector_field) circle", "c in colors]) def func(values, opacity=1): alphas = inverse_interpolate(start, end,", "or [ floor(-config[\"frame_width\"] / 2), ceil(config[\"frame_width\"] / 2), ] self.y_range", "of each point being moved along the vector field. step_size", "* UR + pos[1] * LEFT) - pos stream_lines =", "the creation animation starts at the same speed # as", "self.flow_speed, ), create, ), ) self.remove(line.anim.mobject) line.anim.finish() else: remaining_time =", "line starting positions: x_range: Sequence[float] = None, y_range: Sequence[float] =", "lambda pos: np.sin(pos[0] / 2) * UR + np.cos(pos[1] /", "Any Additional arguments to be passed to the :class:`~.Vector` constructor", "0, max_color_scheme_value: float = 2, colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, #", "pos) / 3 self.add(StreamLines(func)) .. manim:: SpawningAndFlowingArea :save_last_frame: class SpawningAndFlowingArea(Scene):", "function mapping a vector to a single value. 
This value", ") self.add_updater(self.submob_movement_updater) return self def stop_submobject_movement(self) -> \"VectorField\": \"\"\"Stops the", "if warm_up: line.time *= -1 self.add(line.anim.mobject) def updater(mob, dt): for", "array Parameters ---------- start start value used for inverse interpolation", "At `speed=1` the distance a mobject moves per second is", "bool = False, ) -> \"VectorField\": \"\"\"Apply a nudge along", "k_4) step_size = dt / substeps for _ in range(substeps):", "self.add(circle, dot) self.wait(6) \"\"\" def runge_kutta(self, p: Sequence[float], step_size: float)", "create( self, lag_ratio: Optional[float] = None, run_time: Optional[Callable[[float], float]] =", "is divided into. Higher values give more accurate approximations. pointwise", "end value used for inverse interpolation at :func:`~.inverse_interpolate` colors list", "self def get_colored_background_image(self, sampling_rate: int = 5) -> Image.Image: \"\"\"Generate", "movement started using :meth:`start_submobject_movement`. Returns ------- VectorField This vector field.", "is shifted. \"\"\" k_1 = self.func(p) k_2 = self.func(p +", "using `self.color_scheme` and finally generate a color from that value", "[ (x - half_noise) * RIGHT + (y - half_noise)", "inter_alphas = inter_alphas.repeat(3).reshape((len(indices), 3)) result = interpolate(rgbs[indices], rgbs[next_indices], inter_alphas) result", "get to move in the vector field. Higher values therefore", "along the vector field. step_size A scalar that is used", "pos: (pos[0] * UR + pos[1] * LEFT) - pos", "move in the vector field. 
Higher values therefore result in", "representing vector fields.\"\"\" __all__ = [ \"VectorField\", \"ArrowVectorField\", \"StreamLines\", ]", "output = np.array(self.func(point)) norm = np.linalg.norm(output) if norm != 0:", "self, lag_ratio: Optional[float] = None, run_time: Optional[Callable[[float], float]] = None,", "result = np.concatenate( (result, np.full([len(result), 1], opacity)), axis=1, ) return", "points[-1] new_point = last_point + dt * func(last_point) if outside_box(new_point):", "a noticeable cut. Returns ------- :class:`~.AnimationGroup` The animation fading out", "shift to be applied to the vector field. Returns -------", ":class:`~.Mobject` along the vector field. When used with :meth:`~.Mobject.add_updater`, the", "None, three_dimensions: bool = False, noise_factor: Optional[float] = None, n_repeats=1,", "agent moves per step is stretched. Lower values result in", "stream line appearance: stroke_width=1, opacity=1, **kwargs ): self.x_range = x_range", "at which to move the submobjects. See :meth:`get_nudge_updater` for details.", "as run_time self.wait() \"\"\" if run_time is None: run_time =", "the stream lines. Examples -------- .. manim:: StreamLineCreation class StreamLineCreation(Scene):", "However, this whole time gets simulated upon creation. max_anchors_per_line The", "= self.func(p + step_size * (k_2 * 0.5)) k_4 =", "self.ranges[i] += [0.5] self.ranges[i][1] += self.ranges[i][2] self.x_range, self.y_range, self.z_range =", "def runge_kutta(self, p: Sequence[float], step_size: float) -> float: \"\"\"Returns the", "x_min, x_max, delta_x y_range A sequence of y_min, y_max, delta_y", "of the vector field. 
kwargs : Any Additional arguments to", "represent the flow of a :class:`VectorField` using the trace of", "if outside_box(new_point): break points.append(new_point) step = max_steps if not step:", "/ len(self.submobjects) animations = [ Create(line, run_time=run_time, **kwargs) for line", "default this is used to cap the displayed size of", "3 self.add(StreamLines(func)) .. manim:: SpawningAndFlowingArea :save_last_frame: class SpawningAndFlowingArea(Scene): def construct(self):", "> self.x_range[1] + self.padding - self.x_range[2] or p[1] < self.y_range[0]", "self.time_width) * creation_staring_speed ) # creation_run_time is calculated so that", "end, np.array(values)) alphas = np.clip(alphas, 0, 1) scaled_alphas = alphas", "self.flow_speed animations.append( Succession( UpdateFromAlphaFunc( line, finish_updater_cycle, run_time=remaining_time, ), create, ),", "ceil, floor from typing import Callable, Iterable, Optional, Sequence, Tuple,", "c1 = self.rgbs[int(alpha)] c2 = self.rgbs[min(int(alpha + 1), len(self.rgbs) -", "import random from math import ceil, floor from typing import", "vector field. See :meth:`nudge` for details. Returns ------- VectorField This", "is based on the function of the vector field and", "if self.single_color: raise ValueError( \"There is no point in generating", "int = 5) -> Image.Image: \"\"\"Generate an image that displays", "the flow of a :class:`VectorField` using the trace of moving", "field. color The color of the vector field. 
If set,", "substeps, pointwise) return self def get_nudge_updater( self, speed: float =", "(k_2 * 0.5)) k_4 = self.func(p + step_size * k_3)", "self.remove_updater(self.submob_movement_updater) self.submob_movement_updater = None return self def get_colored_background_image(self, sampling_rate: int", "max_color_scheme_value: float = 2, colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, # Determining", "from ..utils.deprecation import deprecated_params from ..utils.rate_functions import ease_out_sine, linear from", "max_steps = ceil(virtual_time / dt) + 1 if not self.single_color:", "colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, # Determining Vector positions: x_range: Sequence[float]", "gradient of the vector field. x_range A sequence of x_min,", "Vector positions: x_range: Sequence[float] = None, y_range: Sequence[float] = None,", ": Any Additional arguments to be passed to the :class:`~.VGroup`", "vector field. \"\"\" self.remove_updater(self.submob_movement_updater) self.submob_movement_updater = None return self def", "stream line animation smoothly. Returns an animation resulting in fully", "from ..animation.composition import AnimationGroup, Succession from ..animation.creation import Create from", "func, color, color_scheme, min_color_scheme_value, max_color_scheme_value, colors, **kwargs, ) self.noise_factor =", "`flow_speed=1` the distance the flow moves per second is equal", "accurate results, but may take a long time to compute.", "\"\"\"Returns the change in position of a point along a", "DARK_GRAY] min_radius = Circle(radius=2, color=colors[0]).shift(LEFT * 5) max_radius = Circle(radius=10,", "break points.append(new_point) step = max_steps if not step: continue if", "from ..animation.creation import Create from ..animation.indication import ShowPassingFlash from ..animation.update", "norm = np.linalg.norm(output) if norm != 0: output *= self.length_func(norm)", "default for direction? 
self.rgbs = np.array(list(map(color_to_rgb, colors))) def pos_to_rgb(pos: np.ndarray)", "self.wait(stream_lines.virtual_time / stream_lines.flow_speed) \"\"\" for line in self.stream_lines: run_time =", "mob: Mobject, dt: float = 1, substeps: int = 1,", "animation might be longer due to the `lag_ratio`. If undefined,", "self.func(p) k_2 = self.func(p + step_size * (k_1 * 0.5))", "%= 1 return interpolate(c1, c2, alpha) self.pos_to_rgb = pos_to_rgb self.pos_to_color", "Calculate the vector field function at that position, map that", "along the vector field. Parameters ---------- func The function defining", "-fh / 2, ph) x_array = x_array.reshape((1, len(x_array))) y_array =", "Area\").shift(DOWN * 2.5)] for lbl in labels: lbl.add_background_rectangle(opacity=0.6, buff=0.05) self.add(stream_lines,", "/ 2 np.random.seed(0) start_points = np.array( [ (x - half_noise)", "to generate the gradient Returns ------- function to generate the", "UP vector_field = ArrowVectorField( func, x_range=[-7, 7, 1], y_range=[-4, 4,", "self.wait() func = VectorField.scale_func(func, 0.5) self.play(vector_field.animate.become(ArrowVectorField(func))) self.wait() Returns ------- `Callable[[np.ndarray],", "stream animation. Raises ------ ValueError if no stream line animation", "return self def get_nudge_updater( self, speed: float = 1, pointwise:", "effect on the center of the given :class:`~.Mobject`. If `True`", "[np.linalg.norm(self.func(point)) for point in line.points], ) line.set_rgba_array_direct( self.values_to_rgbas(norms, opacity), name=\"stroke_rgba\",", "/ sampling_rate) pw = int(config[\"pixel_width\"] / sampling_rate) fw = config[\"frame_width\"]", "fields are always based on a function defining the :class:`~.Vector`", "for details. Returns ------- VectorField This vector field. \"\"\" for", "speed of such a mobject. 
pointwise Whether to move the", "line.color_using_background_image(self.background_img) line.set_stroke(width=self.stroke_width, opacity=opacity) self.add(line) self.stream_lines = [*self.submobjects] def create( self,", ":meth:`start_submobject_movement`. Returns ------- VectorField This vector field. \"\"\" self.remove_updater(self.submob_movement_updater) self.submob_movement_updater", "indices = scaled_alphas.astype(int) next_indices = np.clip(indices + 1, 0, len(rgbs)", "the color gradient of the vector field. x_range A sequence", "along the vector field to all submobjects. Parameters ---------- dt", ":class:`~.VGroup` constructor Examples -------- .. manim:: BasicUsage :save_last_frame: class BasicUsage(Scene):", "class ScaleVectorFieldFunction(Scene): def construct(self): func = lambda pos: np.sin(pos[1]) *", "step_size / 6.0 * (k_1 + 2.0 * k_2 +", "line.set_points_smoothly(points[::step]) if self.single_color: line.set_stroke(self.color) else: if config[\"renderer\"] == \"opengl\": #", "warm_up : bool, optional If `True` the animation is initialized", "to be applied to the vector field. Examples -------- ..", "SpawningAndFlowingArea(Scene): def construct(self): func = lambda pos: np.sin(pos[0]) * UR", "field. shift_vector The shift to be applied to the vector", "based on the magnitude of the vector field. substeps The", "self.color_scheme = color_scheme # TODO maybe other default for direction?", "displayed as a grid of vectors. By default the color", "z in np.arange(*self.z_range) ], ) def outside_box(p): return ( p[0]", "to False, automatically turns True if z_range is not None.", "to the :class:`~.VGroup` constructor \"\"\" def __init__( self, func: Callable[[np.ndarray],", "Higher values give more accurate approximations. 
pointwise Whether to move", "bool = False, # Automatically True if z_range is set", "return self def stop_submobject_movement(self) -> \"VectorField\": \"\"\"Stops the continuous movement", "for n in range(self.n_repeats) for x in np.arange(*self.x_range) for y", "creation animation of the stream lines. Examples -------- .. manim::", "self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5, time_width=0.5) self.wait(1) self.play(stream_lines.end_animation()) \"\"\" if self.flow_animation is", "positions: x_range: Sequence[float] = None, y_range: Sequence[float] = None, z_range:", "None for line in self.stream_lines: create = Create( line, run_time=creation_run_time,", "pos_to_rgb(pos: np.ndarray) -> Tuple[float, float, float, float]: vec = self.func(pos)", "start: float, end: float, colors: Iterable, ): \"\"\" Generates a", "or p[0] > self.x_range[1] + self.padding - self.x_range[2] or p[1]", "field. If set, position-specific coloring is disabled. color_scheme A function", "(x - half_noise) * RIGHT + (y - half_noise) *", "the color of each vector is determined by it's magnitude.", "= None, z_range: Sequence[float] = None, three_dimensions: bool = False,", "self.flow_animation = updater self.flow_speed = flow_speed self.time_width = time_width def", "not self.single_color: self.background_img = self.get_colored_background_image() if config[\"renderer\"] == \"opengl\": self.values_to_rgbas", "RIGHT + np.cos(pos[0]) * UP vector_field = ArrowVectorField(func) self.add(vector_field) self.wait()", "on a function defining the vector at every position. 
The", "% 1 inter_alphas = inter_alphas.repeat(3).reshape((len(indices), 3)) result = interpolate(rgbs[indices], rgbs[next_indices],", "Vector from ..mobject.mobject import Mobject from ..mobject.types.vectorized_mobject import VGroup, VMobject", "This class does by default not include any visible elements", "= np.clip(alphas, 0, 1) scaled_alphas = alphas * (len(rgbs) -", "A function mapping a vector to a single value. This", "linear, line_animation_class: Type[ShowPassingFlash] = ShowPassingFlash, **kwargs ) -> None: \"\"\"Animates", "- 1) indices = scaled_alphas.astype(int) next_indices = np.clip(indices + 1,", "] self.y_range = y_range or [ floor(-config[\"frame_height\"] / 2), ceil(config[\"frame_height\"]", "moved along the vector field. The actual distance is based", "submobjects. See :meth:`get_nudge_updater` for details. pointwise Whether to move the", "distorting it. Returns ------- VectorField This vector field. Examples --------", "Automatically True if z_range is set # Takes in actual", "amount the mobject is moved along the vector field. The", "\"\"\" Generates a gradient of rgbas as a numpy array", "of the animation. If undefined, it will be selected so", "min_color_scheme_value, max_color_scheme_value, color_value, ) alpha *= len(self.rgbs) - 1 c1", "scale_func( func: Callable[[np.ndarray], np.ndarray], scalar: float, ) -> Callable[[np.ndarray], np.ndarray]:", "= lambda mob, dt: mob.nudge_submobjects( dt * speed, pointwise=pointwise, )", "function of the vector field and is rooted in the", "-> AnimationGroup: \"\"\"End the stream line animation smoothly. Returns an", "This vector field. 
\"\"\" self.remove_updater(self.submob_movement_updater) self.submob_movement_updater = None return self", "lambda pos: ((pos[0] * UR + pos[1] * LEFT) -", "get_colored_background_image(self, sampling_rate: int = 5) -> Image.Image: \"\"\"Generate an image", "config[\"frame_width\"] fh = config[\"frame_height\"] points_array = np.zeros((ph, pw, 3)) x_array", "/ 2, ph) x_array = x_array.reshape((1, len(x_array))) y_array = y_array.reshape((len(y_array),", "step_size * (k_2 * 0.5)) k_4 = self.func(p + step_size", "/ flow_speed line.anim = line_animation_class( line, run_time=run_time, rate_func=rate_func, time_width=time_width, **kwargs,", "points of the :class:`~.Mobject`, potentially distorting it. Returns ------- VectorField", "of each vector is determined by it's magnitude. Other color", "color_scheme # TODO maybe other default for direction? self.rgbs =", "position-specific coloring is disabled. color_scheme A function mapping a vector", "field function. Parameters ---------- func The function defining a vector", "flow_speed=1.5, time_width=0.5) self.wait(1) self.play(stream_lines.end_animation()) \"\"\" if self.flow_animation is None: raise", "= np.linalg.norm(output) if norm != 0: output *= self.length_func(norm) /", "to be mapped to the last color in `colors`. Higher", "= np.array(list(map(color_to_rgb, colors))) def pos_to_rgb(pos: np.ndarray) -> Tuple[float, float, float,", "------- VectorField This vector field. \"\"\" self.remove_updater(self.submob_movement_updater) self.submob_movement_updater = None", "**kwargs ) -> AnimationGroup: \"\"\"The creation animation of the stream", "# as the regular line flash animation but eases out.", "animation is initialized line by line. Otherwise it starts with", "every single stream line creation. 
The runtime of the whole", "half_noise) * UP + (z - half_noise) * OUT +", "pointwise: bool = False, ) -> Callable[[Mobject, float], Mobject]: \"\"\"Get", "y_array = np.linspace(fh / 2, -fh / 2, ph) x_array", "return AnimationGroup(*animations, lag_ratio=lag_ratio) def start_animation( self, warm_up=True, flow_speed: float =", "if run_time is None: run_time = self.virtual_time if lag_ratio is", "manim:: SizingAndSpacing class SizingAndSpacing(Scene): def construct(self): func = lambda pos:", "= lambda pos: np.sin(pos[1] / 2) * RIGHT + np.cos(pos[0]", "The function defining a vector field. shift_vector The scalar to", "p, step_size)) else: mob.shift(runge_kutta(self, mob.get_center(), step_size)) return self def nudge_submobjects(", "UR + pos[1] * LEFT) - pos) / 3 self.add(StreamLines(func))", ".. manim:: ContinuousMotion class ContinuousMotion(Scene): def construct(self): func = lambda", "are always based on a function defining the vector at", "from ..utils.color import BLUE_E, GREEN, RED, YELLOW, color_to_rgb, rgb_to_color from", "of the vector is passed, the returned value will be", "Sequence, Tuple, Type import numpy as np from colour import", "stream_lines.start_animation(warm_up=False, flow_speed=1.5, time_width=0.5) self.wait(1) self.play(stream_lines.end_animation()) \"\"\" if self.flow_animation is None:", ":save_last_frame: class Coloring(Scene): def construct(self): func = lambda pos: pos", "The amount by which the starting position of each agent", "self.pos_to_color = lambda pos: rgb_to_color(self.pos_to_rgb(pos)) else: self.single_color = True self.color", "colour import Color from PIL import Image from .. import", "\"\"\" def __init__( self, func: Callable[[np.ndarray], np.ndarray], color: Optional[Color] =", "in self.stream_lines: run_time = line.duration / flow_speed line.anim = line_animation_class(", "get reduced in complexity, not in length. padding The distance", "defined. 
n_repeats The number of agents generated at each starting", "= ArrowVectorField( func, x_range=[-7, 7, 1], y_range=[-4, 4, 1], length_func=lambda", "is no point in generating an image if the vector", "line by line. Otherwise it starts with all lines shown.", "LEFT) - pos) / 3 self.add(StreamLines(func)) .. manim:: SpawningAndFlowingArea :save_last_frame:", "every position. The values of this functions is displayed by", "points = [point] for _ in range(max_steps): last_point = points[-1]", "field. If `False` the vector field takes effect on the", "Returns ------- float How much the point is shifted. \"\"\"", "its speed is determined by the magnitude of the vector", "the vector field. step_size A scalar that is used to", "creation. run_time The run time of every single stream line", "self, dt: float = 1, substeps: int = 1, pointwise:", "func, stroke_width=3, max_anchors_per_line=5, virtual_time=1, color=BLUE ) self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5, time_width=0.5)", "the first color in `colors`. Lower values also result in", "2) * UP vector_field = ArrowVectorField( func, x_range=[-7, 7, 1],", "a function defining a vector at every position. This class", "color in `colors`. Higher values also result in the last", "three_dimensions Enables three_dimensions. Default set to False, automatically turns True", "= True self.color = color self.submob_movement_updater = None @staticmethod def", "times will result in removing the previous updater created by", "an agent moves per step is stretched. Lower values result", "By default the color of each vector is determined by", "better approximation of the trajectories in the vector field. virtual_time", "[ Create(line, run_time=run_time, **kwargs) for line in self.stream_lines ] random.shuffle(animations)", "colors))) def pos_to_rgb(pos: np.ndarray) -> Tuple[float, float, float, float]: vec", "without a noticeable cut. 
Returns ------- :class:`~.AnimationGroup` The animation fading", "lines shown. flow_speed At `flow_speed=1` the distance the flow moves", "len(self.rgbs) - 1 c1 = self.rgbs[int(alpha)] c2 = self.rgbs[min(int(alpha +", "None @staticmethod def shift_func( func: Callable[[np.ndarray], np.ndarray], shift_vector: np.ndarray, )", "turns True if z_range is not None. noise_factor The amount", "animation length is 1.5 times the run time of each", "color at each position is calculated by passing the positing", "+ np.cos(pos[0]) * UP vector_field = ArrowVectorField(func) self.add(vector_field) self.wait() func", "it starts with all lines shown. flow_speed At `flow_speed=1` the", "= StreamLines(func, stroke_width=3, max_anchors_per_line=30) self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5) self.wait(stream_lines.virtual_time / stream_lines.flow_speed)", "substeps for _ in range(substeps): if pointwise: mob.apply_function(lambda p: p", "cairo line.set_stroke(width=self.stroke_width / 4.0) norms = np.array( [np.linalg.norm(self.func(point)) for point", "_ in range(max_steps): last_point = points[-1] new_point = last_point +", "= [ \"VectorField\", \"ArrowVectorField\", \"StreamLines\", ] import itertools as it", "gradient of rgbas as a numpy array Parameters ---------- start", "stream_lines = StreamLines( func, x_range=[-3, 3, 0.2], y_range=[-2, 2, 0.2],", "color gradient of the vector field. kwargs : Any Additional", "self.nudge(mob, dt * speed, pointwise=pointwise) def start_submobject_movement( self, speed: float", "= np.linspace(fh / 2, -fh / 2, ph) x_array =", "z_range is set # Takes in actual norm, spits out", "if not defined. n_repeats The number of agents generated at", "The proportion of the stream line shown while being animated", "the amount the mobject is moved along the vector field.", "to move along the vector field dt A scalar to", "vector field. 
The created vector is based on the function", "y_range = np.arange(*self.y_range) z_range = np.arange(*self.z_range) for x, y, z", "def get_nudge_updater( self, speed: float = 1, pointwise: bool =", "field. virtual_time The time the agents get to move in", "min_radius, max_radius) \"\"\" def __init__( self, func: Callable[[np.ndarray], np.ndarray], color:", "None. noise_factor The amount by which the starting position of", "1), len(self.rgbs) - 1)] alpha %= 1 return interpolate(c1, c2,", "* 5) vf = ArrowVectorField( func, min_color_scheme_value=2, max_color_scheme_value=10, colors=colors )", "not defined. n_repeats The number of agents generated at each", "1], y_range=[-4, 4, 1], stroke_width=3, virtual_time=1, # use shorter lines", "circle.add_updater(vector_field.get_nudge_updater(pointwise=True)) dot.add_updater(vector_field.get_nudge_updater()) self.add(circle, dot) self.wait(6) \"\"\" def runge_kutta(self, p: Sequence[float],", "take a long time to compute. Returns ------- Image.Imgae The", "field. kwargs : Any Additional arguments to be passed to", "simulated upon creation. max_anchors_per_line The maximum number of anchors per", "3, 0.2], y_range=[-2, 2, 0.2], padding=1 ) spawning_area = Rectangle(width=6,", "= stroke_width half_noise = self.noise_factor / 2 np.random.seed(0) start_points =", "an updater. The stream lines will continuously flow Parameters ----------", "the vector field. Higher values therefore result in longer stream", "colors list of colors to generate the gradient Returns -------", "vector_config = {} self.vector_config = vector_config self.func = func x_range", "= ArrowVectorField(func, x_range=[-7, 7, 1]) self.add(vf) self.wait() length_func = lambda", "-> \"VectorField\": \"\"\"Nudge a :class:`~.Mobject` along the vector field. Parameters", "`VectorField`. color The color of the vector field. If set,", "to move a :class:`~.Mobject` along the vector field. 
When used", "+ runge_kutta(self, p, step_size)) else: mob.shift(runge_kutta(self, mob.get_center(), step_size)) return self", "vector field. Parameters ---------- speed At `speed=1` the distance a", "# Determining Vector positions: x_range: Sequence[float] = None, y_range: Sequence[float]", "delta_x y_range A sequence of y_min, y_max, delta_y z_range A", "construct(self): func = lambda pos: np.sin(pos[1] / 2) * RIGHT", "if self.single_color: line.set_stroke(self.color) else: if config[\"renderer\"] == \"opengl\": # scaled", "= self.rgbs[min(int(alpha + 1), len(self.rgbs) - 1)] alpha %= 1", "flow_speed self.time_width = time_width def end_animation(self) -> AnimationGroup: \"\"\"End the", "The stepsize at which pixels get included in the image.", "`colors`. min_color_scheme_value The value of the color_scheme function to be", "the running stream animation. Raises ------ ValueError if no stream", "in self.submobjects: self.nudge(mob, dt, substeps, pointwise) return self def get_nudge_updater(", "* 0.5)) k_4 = self.func(p + step_size * k_3) return", "inter_alphas) result = np.concatenate( (result, np.full([len(result), 1], opacity)), axis=1, )", "from ..mobject.mobject import Mobject from ..mobject.types.vectorized_mobject import VGroup, VMobject from", "RIGHT + y * UP + z * OUT)) self.set_opacity(self.opacity)", "norms = np.array( [np.linalg.norm(self.func(point)) for point in line.points], ) line.set_rgba_array_direct(", "labels = [Tex(\"Spawning Area\"), Tex(\"Flowing Area\").shift(DOWN * 2.5)] for lbl", "the stream lines. opacity The opacity of the stream lines.", "schemes can be used however. Parameters ---------- func The function", "\"\"\"Start continuously moving all submobjects along the vector field. Calling", "# use shorter lines max_anchors_per_line=5, # better performance with fewer", "True if z_range is not None. 
length_func The function determining", "The time the agents get to move in the vector", "dt: float = 1, substeps: int = 1, pointwise: bool", "self.remove(line.anim.mobject) line.anim.finish() else: remaining_time = max_run_time - line.time / self.flow_speed", "the vector field. If `False` the vector field takes effect", "np.full([len(result), 1], opacity)), axis=1, ) return result return func class", "import OUT, RIGHT, UP from ..mobject.geometry import Vector from ..mobject.mobject", "is equal to the magnitude of the vector field along", "color from that value using the color gradient. Parameters ----------", "the gradient. max_color_scheme_value The value of the color_scheme function to", "func class ArrowVectorField(VectorField): \"\"\"A :class:`VectorField` represented by a set of", "bool, optional If `True` the animation is initialized line by", "= 2, colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, # Determining Vector positions:", "scalar) def nudge( self, mob: Mobject, dt: float = 1,", "def __init__( self, func: Callable[[np.ndarray], np.ndarray], color: Optional[Color] = None,", "norm: 0.45 * sigmoid(norm), opacity: float = 1.0, vector_config: Optional[dict]", "Tex(\"Flowing Area\").shift(DOWN * 2.5)] for lbl in labels: lbl.add_background_rectangle(opacity=0.6, buff=0.05)", "The shift to be applied to the vector field. Returns", "int = 1, pointwise: bool = False, ) -> \"VectorField\":", "lines without a noticeable cut. Returns ------- :class:`~.AnimationGroup` The animation", "previous updater created by this method. Parameters ---------- speed The", "* k_2 + 2.0 * k_3 + k_4) step_size =", "for _ in range(substeps): if pointwise: mob.apply_function(lambda p: p +", "= opacity if vector_config is None: vector_config = {} self.vector_config", "Returns ------- VectorField This vector field. Examples -------- .. 
manim::", "functions is displayed by moving many agents along the vector", "[Tex(\"Spawning Area\"), Tex(\"Flowing Area\").shift(DOWN * 2.5)] for lbl in labels:", "direction? self.rgbs = np.array(list(map(color_to_rgb, colors))) def pos_to_rgb(pos: np.ndarray) -> Tuple[float,", "= func if color is None: self.single_color = False if", "np.cos(pos[0] / 2) * UP vector_field = ArrowVectorField( func, x_range=[-7,", "0: output *= self.length_func(norm) / norm vect = Vector(output, **self.vector_config)", "* UR + np.cos(pos[1] / 2) * LEFT stream_lines =", "Type import numpy as np from colour import Color from", "field. Returns ------- `Callable[[np.ndarray], np.ndarray]` The shifted vector field function.", "sampling_rate) fw = config[\"frame_width\"] fh = config[\"frame_height\"] points_array = np.zeros((ph,", "\"opengl\": line = OpenGLVMobject() else: line = VMobject() line.duration =", "if no stream line animation is running Examples -------- ..", "else: vect.set_color(self.pos_to_color(point)) return vect class StreamLines(VectorField): \"\"\"StreamLines represent the flow", "opacity=1, **kwargs ): self.x_range = x_range or [ floor(-config[\"frame_width\"] /", "---------- speed At `speed=1` the distance a mobject moves per", "time of the stream lines is used as run time.", "self.wait() .. manim:: Coloring :save_last_frame: class Coloring(Scene): def construct(self): func", "based on a function defining the vector at every position.", "effect on the points of the individual points of the", "to move the mobject along the vector field. If `False`", "\"\"\" self.stop_submobject_movement() self.submob_movement_updater = lambda mob, dt: mob.nudge_submobjects( dt *", "field. x_range A sequence of x_min, x_max, delta_x y_range A", "regular line flash animation but eases out. dt = 1", "construct(self): func = lambda pos: pos - LEFT * 5", "Parameters ---------- lag_ratio The lag ratio of the animation. If", "field. See :meth:`nudge` for details. 
Returns ------- Callable[[Mobject, float], Mobject]", "have to start the animation before fading it out.\") def", "stepsize at which pixels get included in the image. Lower", "The color of the vector field. If set, position-specific coloring", "for line in mob.stream_lines: line.time += dt * flow_speed if", "for z in np.arange(*self.z_range) ], ) def outside_box(p): return (", "self.stream_lines: create = Create( line, run_time=creation_run_time, rate_func=creation_rate_func, ) if line.time", "for direction? self.rgbs = np.array(list(map(color_to_rgb, colors))) def pos_to_rgb(pos: np.ndarray) ->", "LEFT) - pos stream_lines = StreamLines( func, color=YELLOW, x_range=[-7, 7,", "field function. \"\"\" return lambda p: func(p * scalar) def", "*labels) \"\"\" def __init__( self, func: Callable[[np.ndarray], np.ndarray], color: Optional[Color]", "updater self.flow_speed = flow_speed self.time_width = time_width def end_animation(self) ->", "Lower values result in a better approximation of the trajectories", "from PIL import Image from .. import config from ..animation.composition", "be mapped to the first color in `colors`. Lower values", "of the trajectories in the vector field. virtual_time The time", "as a grid of vectors. 
By default the color of", "does by default not include any visible elements but provides", "be passed to the :class:`~.VGroup` constructor \"\"\" def __init__( self,", "from ..animation.update import UpdateFromAlphaFunc from ..constants import OUT, RIGHT, UP", "Callable[[Mobject, float], Mobject]: \"\"\"Get an update function to move a", "y_range: Sequence[float] = None, z_range: Sequence[float] = None, three_dimensions: bool", "+ 2.0 * k_3 + k_4) step_size = dt /", "is able to respond to changes in the vector field", "+ self.padding - self.z_range[2] ) max_steps = ceil(virtual_time / dt)", "\"\"\" self.remove_updater(self.submob_movement_updater) self.submob_movement_updater = None return self def get_colored_background_image(self, sampling_rate:", "arrows. vector_config Additional arguments to be passed to the :class:`~.Vector`", "p[1] > self.y_range[1] + self.padding - self.y_range[2] or p[2] <", "point in line.points], ) line.set_rgba_array_direct( self.values_to_rgbas(norms, opacity), name=\"stroke_rgba\", ) else:", "line.time -= self.virtual_time line.anim.interpolate(np.clip(line.time / line.anim.run_time, 0, 1)) self.add_updater(updater) self.flow_animation", "opacity of the stream lines. Examples -------- .. manim:: BasicUsage", "The shifted vector field function. \"\"\" return lambda p: func(p", "y_range=[-2, 2, 0.2], padding=1 ) spawning_area = Rectangle(width=6, height=4) flowing_area", ":class:`~.Mobject`. If `True` the vector field takes effect on the", "value will be used as display size for the vector.", "def get_vectorized_rgba_gradient_function( self, start: float, end: float, colors: Iterable, ):", "flowing_area, *labels) \"\"\" def __init__( self, func: Callable[[np.ndarray], np.ndarray], color:", "padding=3, # Determining stream line appearance: stroke_width=1, opacity=1, **kwargs ):", "move along the vector field, where its speed is determined", "used however. 
Parameters ---------- func The function defining the rate", "* UR + np.cos(pos[1] / 2) * LEFT vf =", "return interpolate(c1, c2, alpha) self.pos_to_rgb = pos_to_rgb self.pos_to_color = lambda", "a function defining the vector at every position. The values", "step = max(1, int(len(points) / self.max_anchors_per_line)) line.set_points_smoothly(points[::step]) if self.single_color: line.set_stroke(self.color)", "the trajectories in the vector field. virtual_time The time the", "values give more accurate approximations. pointwise Whether to move the", "z in it.product(x_range, y_range, z_range): self.add(self.get_vector(x * RIGHT + y", "vector_field.nudge(dot, -2, 60) circle.add_updater(vector_field.get_nudge_updater(pointwise=True)) dot.add_updater(vector_field.get_nudge_updater()) self.add(circle, dot) self.wait(6) \"\"\" def", "vector is determined by it's magnitude. Other color schemes can", "to :code:`delta_y / 2` if not defined. n_repeats The number", "x_range=[-7, 7, 1]) self.add(vf) self.wait() length_func = lambda x: x", "vector_config is None: vector_config = {} self.vector_config = vector_config self.func", "and `colors`. min_color_scheme_value The value of the color_scheme function to", "it. Returns ------- VectorField This vector field. Examples -------- ..", "( p[0] < self.x_range[0] - self.padding or p[0] > self.x_range[1]", "= pos_to_rgb self.pos_to_color = lambda pos: rgb_to_color(self.pos_to_rgb(pos)) else: self.single_color =", "colors: Iterable, ): \"\"\" Generates a gradient of rgbas as", "+= dt * self.flow_speed line.anim.interpolate(min(line.time / line.anim.run_time, 1)) if alpha", "= self.func(pos) color_value = np.clip( self.color_scheme(vec), min_color_scheme_value, max_color_scheme_value, ) alpha", "creation_run_time = ( max_run_time / (1 + self.time_width) * creation_staring_speed", "moved along the vector field. 
step_size A scalar that is", "self.single_color: vect.set_color(self.color) else: vect.set_color(self.pos_to_color(point)) return vect class StreamLines(VectorField): \"\"\"StreamLines represent", "- self.padding or p[0] > self.x_range[1] + self.padding - self.x_range[2]", "stream lines. Examples -------- .. manim:: BasicUsage :save_last_frame: class BasicUsage(Scene):", "field. substeps The amount of steps the whole nudge is", "6.0 * (k_1 + 2.0 * k_2 + 2.0 *", "the positing through a series of steps: Calculate the vector", "line.set_stroke(width=self.stroke_width / 4.0) norms = np.array( [np.linalg.norm(self.func(point)) for point in", "shift_func( func: Callable[[np.ndarray], np.ndarray], shift_vector: np.ndarray, ) -> Callable[[np.ndarray], np.ndarray]:", "DEFAULT_SCALAR_FIELD_COLORS, # Determining Vector positions: x_range: Sequence[float] = None, y_range:", "dt = 1 / config[\"frame_rate\"] animations = [] self.remove_updater(self.flow_animation) self.flow_animation", "------- :class:`~.AnimationGroup` The animation fading out the running stream animation.", "pos: pos - LEFT * 5 colors = [RED, YELLOW,", "**kwargs, ) self.length_func = length_func self.opacity = opacity if vector_config", "Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, # Determining stream line starting positions: x_range:", "config[\"renderer\"] == \"opengl\": self.values_to_rgbas = self.get_vectorized_rgba_gradient_function( min_color_scheme_value, max_color_scheme_value, colors, )", "bool = False, ) -> \"VectorField\": \"\"\"Nudge a :class:`~.Mobject` along", "displayed size of the vectors. The actual size of the", "pw = int(config[\"pixel_width\"] / sampling_rate) fw = config[\"frame_width\"] fh =", "The run time of every single stream line creation. The", "len(self.submobjects) animations = [ Create(line, run_time=run_time, **kwargs) for line in", "The amount of steps the whole nudge is divided into.", "x_range=[-7, 7, 1], y_range=[-4, 4, 1], length_func=lambda x: x /", "however. 
Parameters ---------- func The function defining the rate of", "LEFT stream_lines = StreamLines(func, stroke_width=3, max_anchors_per_line=30) self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5) self.wait(stream_lines.virtual_time", "z_range): self.add(self.get_vector(x * RIGHT + y * UP + z", "x_array points_array[:, :, 1] = y_array rgbs = np.apply_along_axis(self.pos_to_rgb, 2,", "line creation. The runtime of the whole animation might be", "- pos stream_lines = StreamLines( func, color=YELLOW, x_range=[-7, 7, 1],", "manim:: Coloring :save_last_frame: class Coloring(Scene): def construct(self): func = lambda", "* UP + (z - half_noise) * OUT + self.noise_factor", "Type[ShowPassingFlash] = ShowPassingFlash, **kwargs ) -> None: \"\"\"Animates the stream", "last color in `colors`. Higher values also result in the", "color, color_scheme, min_color_scheme_value, max_color_scheme_value, colors, **kwargs, ) self.length_func = length_func", "vector. By default this is used to cap the displayed", "= ( noise_factor if noise_factor is not None else self.y_range[2]", "* (len(rgbs) - 1) indices = scaled_alphas.astype(int) next_indices = np.clip(indices", "to be applied to the vector field. Returns ------- `Callable[[np.ndarray],", "speed At `speed=1` the distance a mobject moves per second", "class StreamLineCreation(Scene): def construct(self): func = lambda pos: (pos[0] *", "c2, alpha) self.pos_to_rgb = pos_to_rgb self.pos_to_color = lambda pos: rgb_to_color(self.pos_to_rgb(pos))", "def hide_and_wait(mob, alpha): if alpha == 0: mob.set_stroke(opacity=0) elif alpha", "func The function defining a vector field. shift_vector The shift", "y_array.repeat(...)? points_array[:, :, 0] = x_array points_array[:, :, 1] =", "of the `VectorField`. 
color The color of the vector field.", "mob in self.submobjects: self.nudge(mob, dt, substeps, pointwise) return self def", "float = 1.0, vector_config: Optional[dict] = None, **kwargs ): self.x_range", "import interpolate, inverse_interpolate from ..utils.color import BLUE_E, GREEN, RED, YELLOW,", "shift_vector The scalar to be applied to the vector field.", "an update function to move a :class:`~.Mobject` along the vector", "every position. The values of this functions is displayed as", "this whole time gets simulated upon creation. max_anchors_per_line The maximum", "Examples -------- .. manim:: EndAnimation class EndAnimation(Scene): def construct(self): func", "run_time = line.duration / flow_speed line.anim = line_animation_class( line, run_time=run_time,", "0.2], y_range=[-2, 2, 0.2], padding=1 ) spawning_area = Rectangle(width=6, height=4)", "opacity of the arrows. vector_config Additional arguments to be passed", "lag_ratio = run_time / 2 / len(self.submobjects) animations = [", "set, position-specific coloring is disabled. color_scheme A function mapping a", "position in the color gradient defined using `min_color_scheme_value`, `max_color_scheme_value` and", "used for inverse interpolation at :func:`~.inverse_interpolate` end end value used", "dt * self.flow_speed line.anim.interpolate(min(line.time / line.anim.run_time, 1)) if alpha ==", "np.ndarray], shift_vector: np.ndarray, ) -> Callable[[np.ndarray], np.ndarray]: \"\"\"Shift a vector", "manim:: EndAnimation class EndAnimation(Scene): def construct(self): func = lambda pos:", "colors to generate the gradient Returns ------- function to generate", "color gradient defined using `min_color_scheme_value`, `max_color_scheme_value` and `colors`. min_color_scheme_value The", "field to all submobjects. 
Parameters ---------- dt A scalar to", "+= [[0, 0]] for i in range(len(self.ranges)): if len(self.ranges[i]) ==", "resulting in fully displayed stream lines without a noticeable cut.", "if vector_config is None: vector_config = {} self.vector_config = vector_config", "= Rectangle(width=8, height=6) labels = [Tex(\"Spawning Area\"), Tex(\"Flowing Area\").shift(DOWN *", "run_time: Optional[Callable[[float], float]] = None, **kwargs ) -> AnimationGroup: \"\"\"The", "or self.y_range.copy() self.ranges += [self.z_range] else: self.ranges += [[0, 0]]", ") # creation_run_time is calculated so that the creation animation", "run_time=remaining_time, ), create, ), ) return AnimationGroup(*animations) # TODO: Variant", "colors, **kwargs, ) self.noise_factor = ( noise_factor if noise_factor is", "fading it out.\") def hide_and_wait(mob, alpha): if alpha == 0:", "vectors to reduce the clutter. opacity The opacity of the", "0.3, rate_func: Callable[[float], float] = linear, line_animation_class: Type[ShowPassingFlash] = ShowPassingFlash,", "self.add_updater(self.submob_movement_updater) return self def stop_submobject_movement(self) -> \"VectorField\": \"\"\"Stops the continuous", "values of this functions is displayed as a grid of", "of the stream lines. Examples -------- .. manim:: BasicUsage :save_last_frame:", "time_width=0.5) self.wait(1) self.play(stream_lines.end_animation()) \"\"\" if self.flow_animation is None: raise ValueError(\"You", "to a single value. This value gives the position in", "rate of change at every position of the `VectorField`. color", "norm vect = Vector(output, **self.vector_config) vect.shift(point) if self.single_color: vect.set_color(self.color) else:", "of the vector field. Parameters ---------- speed At `speed=1` the", "from ..constants import OUT, RIGHT, UP from ..mobject.geometry import Vector", "time to compute. Returns ------- Image.Imgae The vector field image.", "-------- .. 
manim:: StreamLineCreation class StreamLineCreation(Scene): def construct(self): func =", "raise ValueError(\"You have to start the animation before fading it", "ph = int(config[\"pixel_height\"] / sampling_rate) pw = int(config[\"pixel_width\"] / sampling_rate)", "= [RED, YELLOW, BLUE, DARK_GRAY] min_radius = Circle(radius=2, color=colors[0]).shift(LEFT *", "if config[\"renderer\"] == \"opengl\": # scaled for compatibility with cairo", "run_time is None: run_time = self.virtual_time if lag_ratio is None:", "/ norm vect = Vector(output, **self.vector_config) vect.shift(point) if self.single_color: vect.set_color(self.color)", "created vector is based on the function of the vector", "Default set to False, automatically turns True if z_range is", "flow_speed=1.5) self.wait(stream_lines.virtual_time / stream_lines.flow_speed) \"\"\" for line in self.stream_lines: run_time", "field. The created vector is based on the function of", "The lag ratio of the animation. If undefined, it will", "distance is based on the magnitude of the vector field.", "sigmoid(norm), opacity: float = 1.0, vector_config: Optional[dict] = None, **kwargs", "of steps: Calculate the vector field function at that position,", "position of the vector field. color The color of the", "but eases out. dt = 1 / config[\"frame_rate\"] animations =", "pos: np.sin(pos[1]) * RIGHT + np.cos(pos[0]) * UP vector_field =", "continuously moving all submobjects along the vector field. Calling this", "the stream line shown while being animated rate_func The rate", "the vector field and is rooted in the given point.", "\"opengl\": self.values_to_rgbas = self.get_vectorized_rgba_gradient_function( min_color_scheme_value, max_color_scheme_value, colors, ) for point", "random order. 
Parameters ---------- lag_ratio The lag ratio of the", "The rate function of each stream line flashing line_animation_class The", ":meth:`~.Mobject.add_updater`, the mobject will move along the vector field, where", "color_scheme: Optional[Callable[[np.ndarray], float]] = None, min_color_scheme_value: float = 0, max_color_scheme_value:", "a :class:`~.Mobject` along the vector field. When used with :meth:`~.Mobject.add_updater`,", "image that displays the vector field. The color at each", "line in self.stream_lines: create = Create( line, run_time=creation_run_time, rate_func=creation_rate_func, )", "vector field. When used with :meth:`~.Mobject.add_updater`, the mobject will move", "= Vector(output, **self.vector_config) vect.shift(point) if self.single_color: vect.set_color(self.color) else: vect.set_color(self.pos_to_color(point)) return", ") self.remove(line.anim.mobject) line.anim.finish() else: remaining_time = max_run_time - line.time /", "class ArrowVectorField(VectorField): \"\"\"A :class:`VectorField` represented by a set of change", "func = lambda pos: ((pos[0] * UR + pos[1] *", "= Circle(radius=10, color=colors[-1]).shift(LEFT * 5) vf = ArrowVectorField( func, min_color_scheme_value=2,", "function to move a :class:`~.Mobject` along the vector field. When", "stream lines. Examples -------- .. manim:: StreamLineCreation class StreamLineCreation(Scene): def", "reduce the clutter. opacity The opacity of the arrows. vector_config", "-= self.virtual_time line.anim.interpolate(np.clip(line.time / line.anim.run_time, 0, 1)) self.add_updater(updater) self.flow_animation =", "using `min_color_scheme_value`, `max_color_scheme_value` and `colors`. min_color_scheme_value The value of the", "actual distance is based on the magnitude of the vector", "be mapped to the last color in `colors`. 
Higher values", "max_anchors_per_line=30) self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5) self.wait(stream_lines.virtual_time / stream_lines.flow_speed) \"\"\" for line", "values of this functions is displayed by moving many agents", "animations = [ Create(line, run_time=run_time, **kwargs) for line in self.stream_lines", "None, **kwargs ) -> AnimationGroup: \"\"\"The creation animation of the", "elements but provides methods to move other :class:`~.Mobject` s along", "ArrowVectorField(func, x_range=[-7, 7, 1], length_func=length_func) self.play(vf.animate.become(vf2)) self.wait() .. manim:: Coloring", "spawning_area, flowing_area, *labels) \"\"\" def __init__( self, func: Callable[[np.ndarray], np.ndarray],", "every position. This class does by default not include any", "Circle(radius=2, color=colors[0]).shift(LEFT * 5) max_radius = Circle(radius=10, color=colors[-1]).shift(LEFT * 5)", "is disabled. color_scheme A function mapping a vector to a", "takes effect on the points of the individual points of", "vector field. x_range A sequence of x_min, x_max, delta_x y_range", "other :class:`~.Mobject` s along the vector field. Parameters ---------- func", "norm, spits out displayed norm length_func: Callable[[float], float] = lambda", "= None, y_range: Sequence[float] = None, z_range: Sequence[float] = None,", "not None else self.y_range[2] / 2 ) self.n_repeats = n_repeats", "0.5])): line.set_stroke( [self.pos_to_color(p) for p in line.get_anchors()], ) else: line.color_using_background_image(self.background_img)", "of this flow. time_width The proportion of the stream line", "is running Examples -------- .. manim:: EndAnimation class EndAnimation(Scene): def", "step: continue if config[\"renderer\"] == \"opengl\": line = OpenGLVMobject() else:", "mapping a vector to a single value. 
This value gives", "self.max_anchors_per_line = max_anchors_per_line self.padding = padding self.stroke_width = stroke_width half_noise", "# TODO: Variant of StreamLines that is able to respond", "divided into. Higher values give more accurate approximations. pointwise Whether", "of the vector. kwargs : Any Additional arguments to be", ") self.play(stream_lines.create()) # uses virtual_time as run_time self.wait() \"\"\" if", "self.func(p + step_size * (k_1 * 0.5)) k_3 = self.func(p", "ShowPassingFlash from ..animation.update import UpdateFromAlphaFunc from ..constants import OUT, RIGHT,", "* scalar) def nudge( self, mob: Mobject, dt: float =", "class BasicUsage(Scene): def construct(self): func = lambda pos: ((pos[0] *", "might be longer due to the `lag_ratio`. If undefined, the", "function to be mapped to the first color in `colors`.", "of anchors per line. Lines with more anchors get reduced", "`lag_ratio`. If undefined, the virtual time of the stream lines", "elif alpha == 1: mob.set_stroke(opacity=1) def finish_updater_cycle(line, alpha): line.time +=", "by which the starting position of each agent is altered", "the color gradient. Parameters ---------- sampling_rate The stepsize at which", "the animation is initialized line by line. Otherwise it starts", "OpenGLVMobject() else: line = VMobject() line.duration = step * dt", "\"\"\" return lambda p: func(p * scalar) def nudge( self,", "---------- point The root point of the vector. kwargs :", "RED, YELLOW, color_to_rgb, rgb_to_color from ..utils.deprecation import deprecated_params from ..utils.rate_functions", "each axis. Defaults to :code:`delta_y / 2` if not defined.", "Returns ------- VectorField This vector field. \"\"\" self.stop_submobject_movement() self.submob_movement_updater =", "vector field takes effect on the center of the given", "mapped to the last color in `colors`. 
Higher values also", "max_color_scheme_value: float = 2, colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, **kwargs ):", "the vector field. The actual distance is based on the", "SizingAndSpacing(Scene): def construct(self): func = lambda pos: np.sin(pos[0] / 2)", "UR + np.cos(pos[1] / 2) * LEFT stream_lines = StreamLines(", "import Create from ..animation.indication import ShowPassingFlash from ..animation.update import UpdateFromAlphaFunc", "self.func = func if color is None: self.single_color = False", "pixels get included in the image. Lower values give more", "field. step_size A scalar that is used to determine how", "float = 1, pointwise: bool = False, ) -> Callable[[Mobject,", "1], stroke_width=3, virtual_time=1, # use shorter lines max_anchors_per_line=5, # better", "sigmoid from .types.opengl_vectorized_mobject import OpenGLVMobject DEFAULT_SCALAR_FIELD_COLORS: list = [BLUE_E, GREEN,", "# Determining stream line appearance: stroke_width=1, opacity=1, **kwargs ): self.x_range", "): \"\"\" Generates a gradient of rgbas as a numpy", "to the vector field. Examples -------- .. manim:: ScaleVectorFieldFunction class", "moving agents. Vector fields are always based on a function", "* self.virtual_time if warm_up: line.time *= -1 self.add(line.anim.mobject) def updater(mob,", "how lines are drawn dt=0.05, virtual_time=3, max_anchors_per_line=100, padding=3, # Determining", "that position, map that vector to a single value using", "displayed norm length_func: Callable[[float], float] = lambda norm: 0.45 *", "interpolation at :func:`~.inverse_interpolate` end end value used for inverse interpolation", "be passed to the :class:`~.VGroup` constructor Examples -------- .. manim::", "np.cos(pos[1] / 2) * LEFT vf = ArrowVectorField(func, x_range=[-7, 7,", "for inverse interpolation at :func:`~.inverse_interpolate` colors list of colors to", "continuous movement started using :meth:`start_submobject_movement`. 
Returns ------- VectorField This vector", "* sigmoid(norm), opacity: float = 1.0, vector_config: Optional[dict] = None,", "generation area before being terminated. stroke_width The stroke with of", "generated at each starting point. dt The factor by which", "more accurate results, but may take a long time to", "flow. time_width The proportion of the stream line shown while", "z_range is not None. noise_factor The amount by which the", "Any Additional arguments to be passed to the :class:`~.VGroup` constructor", "inter_alphas = scaled_alphas % 1 inter_alphas = inter_alphas.repeat(3).reshape((len(indices), 3)) result", "the stream lines. Examples -------- .. manim:: BasicUsage :save_last_frame: class", "len(self.ranges[i]) == 2: self.ranges[i] += [0.5] self.ranges[i][1] += self.ranges[i][2] self.x_range,", "center of the given :class:`~.Mobject`. If `True` the vector field", "line_animation_class: Type[ShowPassingFlash] = ShowPassingFlash, **kwargs ) -> None: \"\"\"Animates the", "x: x / 3 vf2 = ArrowVectorField(func, x_range=[-7, 7, 1],", "1] = y_array rgbs = np.apply_along_axis(self.pos_to_rgb, 2, points_array) return Image.fromarray((rgbs", "= self.virtual_time / self.flow_speed creation_rate_func = ease_out_sine creation_staring_speed = creation_rate_func(0.001)", "**self.vector_config) vect.shift(point) if self.single_color: vect.set_color(self.color) else: vect.set_color(self.pos_to_color(point)) return vect class", "pos stream_lines = StreamLines( func, color=YELLOW, x_range=[-7, 7, 1], y_range=[-4,", "which the starting position of each agent is altered along", "pointwise=pointwise, ) self.add_updater(self.submob_movement_updater) return self def stop_submobject_movement(self) -> \"VectorField\": \"\"\"Stops", "def create( self, lag_ratio: Optional[float] = None, run_time: Optional[Callable[[float], float]]", "The scalar to be applied to the vector field. Examples", "three_dimensions. 
Default set to False, automatically turns True if z_range", "np.random.random(3) for n in range(self.n_repeats) for x in np.arange(*self.x_range) for", "..utils.bezier import interpolate, inverse_interpolate from ..utils.color import BLUE_E, GREEN, RED,", "dt * speed, pointwise=pointwise, ) self.add_updater(self.submob_movement_updater) return self def stop_submobject_movement(self)", "runge_kutta(self, p, step_size)) else: mob.shift(runge_kutta(self, mob.get_center(), step_size)) return self def", "at every position. This class does by default not include", "self.func(p + step_size * (k_2 * 0.5)) k_4 = self.func(p", "the :class:`~.VGroup` constructor Examples -------- .. manim:: BasicUsage :save_last_frame: class", "Nudging class Nudging(Scene): def construct(self): func = lambda pos: np.sin(pos[1]", "np.ndarray]` The shifted vector field function. \"\"\" return lambda p:", "opacity=1): alphas = inverse_interpolate(start, end, np.array(values)) alphas = np.clip(alphas, 0,", "rate function of each stream line flashing line_animation_class The animation", "of the vector field. If set, position-specific coloring is disabled.", "False, ) -> Callable[[Mobject, float], Mobject]: \"\"\"Get an update function", "spits out displayed norm length_func: Callable[[float], float] = lambda norm:", "on the points of the individual points of the :class:`~.Mobject`,", "1], length_func=lambda x: x / 2 ) self.add(vector_field) circle =", "p: Sequence[float], step_size: float) -> float: \"\"\"Returns the change in", "and is rooted in the given point. 
Color and length", "line appearance: stroke_width=1, opacity=1, **kwargs ): self.x_range = x_range or", "= self.noise_factor / 2 np.random.seed(0) start_points = np.array( [ (x", "* speed, pointwise=pointwise, ) self.add_updater(self.submob_movement_updater) return self def stop_submobject_movement(self) ->", "def construct(self): func = lambda pos: np.sin(pos[0]) * UR +", "------- `Callable[[np.ndarray], np.ndarray]` The scaled vector field function. \"\"\" return", "self.ranges += [[0, 0]] for i in range(len(self.ranges)): if len(self.ranges[i])", "!= np.array([0, 0.5, 0.5])): line.set_stroke( [self.pos_to_color(p) for p in line.get_anchors()],", "norm length_func: Callable[[float], float] = lambda norm: 0.45 * sigmoid(norm),", "pw) y_array = np.linspace(fh / 2, -fh / 2, ph)", "use shorter lines max_anchors_per_line=5, # better performance with fewer anchors", "color of each vector is determined by it's magnitude. Other", "no stream line animation is running Examples -------- .. manim::", "* UP vector_field = ArrowVectorField( func, x_range=[-7, 7, 1], y_range=[-4,", "The stream lines appear in random order. Parameters ---------- lag_ratio", "rate of change at every position of the vector field.", "mobject along the vector field. See :meth:`nudge` for details. Returns", "= lambda norm: 0.45 * sigmoid(norm), opacity: float = 1.0,", "as run time. Returns ------- :class:`~.AnimationGroup` The creation animation of", "np.arange(*self.y_range) z_range = np.arange(*self.z_range) for x, y, z in it.product(x_range,", "pos: np.sin(pos[0] / 2) * UR + np.cos(pos[1] / 2)", "fw = config[\"frame_width\"] fh = config[\"frame_height\"] points_array = np.zeros((ph, pw,", "vector field. See :meth:`nudge` for details. Returns ------- Callable[[Mobject, float],", "for x, y, z in it.product(x_range, y_range, z_range): self.add(self.get_vector(x *", "altered along each axis. 
Defaults to :code:`delta_y / 2` if", "y_array = y_array.reshape((len(y_array), 1)) x_array = x_array.repeat(ph, axis=0) y_array.repeat(pw, axis=1)", "speed # as the regular line flash animation but eases", "therefore result in longer stream lines. However, this whole time", "-2, 60) circle.add_updater(vector_field.get_nudge_updater(pointwise=True)) dot.add_updater(vector_field.get_nudge_updater()) self.add(circle, dot) self.wait(6) \"\"\" def runge_kutta(self,", "None, three_dimensions: bool = False, # Automatically True if z_range", "lines are drawn dt=0.05, virtual_time=3, max_anchors_per_line=100, padding=3, # Determining stream", "opacity=opacity) self.add(line) self.stream_lines = [*self.submobjects] def create( self, lag_ratio: Optional[float]", "being terminated. stroke_width The stroke with of the stream lines.", "60, True) vector_field.nudge(dot, -2, 60) circle.add_updater(vector_field.get_nudge_updater(pointwise=True)) dot.add_updater(vector_field.get_nudge_updater()) self.add(circle, dot) self.wait(6)", "np.linalg.norm(output) if norm != 0: output *= self.length_func(norm) / norm", "The root point of the vector. kwargs : Any Additional", "k_2 + 2.0 * k_3 + k_4) step_size = dt", "self.play(stream_lines.create()) # uses virtual_time as run_time self.wait() \"\"\" if run_time", "= False, ) -> \"VectorField\": \"\"\"Apply a nudge along the", "max_radius = Circle(radius=10, color=colors[-1]).shift(LEFT * 5) vf = ArrowVectorField( func,", "self.flow_animation = None for line in self.stream_lines: create = Create(", "np.sin(pos[0]) * UR + np.cos(pos[1]) * LEFT + pos /", "class SizingAndSpacing(Scene): def construct(self): func = lambda pos: np.sin(pos[0] /", "UP + (z - half_noise) * OUT + self.noise_factor *", "field. shift_vector The scalar to be applied to the vector", "along the vector field. Parameters ---------- mob The mobject to", "float How much the point is shifted. \"\"\" k_1 =", "not None. 
noise_factor The amount by which the starting position", "/ 2) * LEFT stream_lines = StreamLines(func, stroke_width=3, max_anchors_per_line=30) self.add(stream_lines)", "= 1, pointwise: bool = False, ) -> \"VectorField\": \"\"\"Nudge", "image. \"\"\" if self.single_color: raise ValueError( \"There is no point", "magnitude of the vector field. Parameters ---------- speed At `speed=1`", "field. Vector fields are based on a function defining a", "self.rgbs[int(alpha)] c2 = self.rgbs[min(int(alpha + 1), len(self.rgbs) - 1)] alpha", "RIGHT + np.cos(pos[0] / 2) * UP vector_field = ArrowVectorField(", "the :class:`~.VGroup` constructor \"\"\" def __init__( self, func: Callable[[np.ndarray], np.ndarray],", "point being moved along the vector field. step_size A scalar", "return vect class StreamLines(VectorField): \"\"\"StreamLines represent the flow of a", "if np.any(self.z_range != np.array([0, 0.5, 0.5])): line.set_stroke( [self.pos_to_color(p) for p", "in a better approximation of the trajectories in the vector", "BasicUsage :save_last_frame: class BasicUsage(Scene): def construct(self): func = lambda pos:", "field. Examples -------- .. manim:: Nudging class Nudging(Scene): def construct(self):", ": bool, optional If `True` the animation is initialized line", "the vector field. virtual_time The time the agents get to", "= OpenGLVMobject() else: line = VMobject() line.duration = step *", "is shifted in a single step. Returns ------- float How", "x_range=[-3, 3, 0.2], y_range=[-2, 2, 0.2], padding=1 ) spawning_area =", "line.set_rgba_array_direct( self.values_to_rgbas(norms, opacity), name=\"stroke_rgba\", ) else: if np.any(self.z_range != np.array([0,", "if z_range is not None. noise_factor The amount by which", "- pos) / 3 self.add(ArrowVectorField(func)) .. manim:: SizingAndSpacing class SizingAndSpacing(Scene):", "c2 = self.rgbs[min(int(alpha + 1), len(self.rgbs) - 1)] alpha %=", "color The color of the vector field. 
If set, position-specific", "import Callable, Iterable, Optional, Sequence, Tuple, Type import numpy as", "animation. If undefined, it will be selected so that the", "based on a function defining a vector at every position.", "dt step = max(1, int(len(points) / self.max_anchors_per_line)) line.set_points_smoothly(points[::step]) if self.single_color:", "self.rgbs[min(int(alpha + 1), len(self.rgbs) - 1)] alpha %= 1 return", "------- :class:`~.AnimationGroup` The creation animation of the stream lines. Examples", "[[0, 0]] for i in range(len(self.ranges)): if len(self.ranges[i]) == 2:", "better performance with fewer anchors ) self.play(stream_lines.create()) # uses virtual_time", "set # Takes in actual norm, spits out displayed norm", "Returns ------- Callable[[Mobject, float], Mobject] The update function. \"\"\" return", "time_width def end_animation(self) -> AnimationGroup: \"\"\"End the stream line animation", "AnimationGroup, Succession from ..animation.creation import Create from ..animation.indication import ShowPassingFlash", "\"VectorField\": \"\"\"Stops the continuous movement started using :meth:`start_submobject_movement`. Returns -------", "calculated by passing the positing through a series of steps:", "on the function of the vector field and is rooted", "substeps: int = 1, pointwise: bool = False, ) ->", "half_noise) * RIGHT + (y - half_noise) * UP +", "this method. Parameters ---------- speed The speed at which to", "give more accurate approximations. pointwise Whether to move the mobject", "self.y_range] if three_dimensions or z_range: self.z_range = z_range or self.y_range.copy()", "virtual_time=1, # use shorter lines max_anchors_per_line=5, # better performance with", "the image. 
Lower values give more accurate results, but may", "dt * speed, pointwise=pointwise) def start_submobject_movement( self, speed: float =", "z_range = np.arange(*self.z_range) for x, y, z in it.product(x_range, y_range,", "if alpha == 0: mob.set_stroke(opacity=0) elif alpha == 1: mob.set_stroke(opacity=1)", "magnitude of the vector field along its path. The speed", "vector to a single value using `self.color_scheme` and finally generate", "# TODO maybe other default for direction? self.rgbs = np.array(list(map(color_to_rgb,", "self.virtual_time / self.flow_speed creation_rate_func = ease_out_sine creation_staring_speed = creation_rate_func(0.001) *", "x_range = np.arange(*self.x_range) y_range = np.arange(*self.y_range) z_range = np.arange(*self.z_range) for", "= self.func(p) k_2 = self.func(p + step_size * (k_1 *", ":, 1] = y_array rgbs = np.apply_along_axis(self.pos_to_rgb, 2, points_array) return", "7, 1], y_range=[-4, 4, 1], stroke_width=3, virtual_time=1, # use shorter", "a mobject. pointwise Whether to move the mobject along the", "as display size for the vector. By default this is", "* OUT)) self.set_opacity(self.opacity) def get_vector(self, point: np.ndarray): \"\"\"Creates a vector", "self.get_colored_background_image() if config[\"renderer\"] == \"opengl\": self.values_to_rgbas = self.get_vectorized_rgba_gradient_function( min_color_scheme_value, max_color_scheme_value,", "the vector field dt A scalar to the amount the", "pos - LEFT * 5 colors = [RED, YELLOW, BLUE,", "agents can move out of the generation area before being", "run_time = self.virtual_time if lag_ratio is None: lag_ratio = run_time", "0.45 * sigmoid(norm), opacity: float = 1.0, vector_config: Optional[dict] =", "deprecated_params from ..utils.rate_functions import ease_out_sine, linear from ..utils.simple_functions import sigmoid", "`colors`. Lower values also result in the first color of", "point. 
dt The factor by which the distance an agent", "floor from typing import Callable, Iterable, Optional, Sequence, Tuple, Type", "a vector field. shift_vector The shift to be applied to", "alpha *= len(self.rgbs) - 1 c1 = self.rgbs[int(alpha)] c2 =", "the vector field. Parameters ---------- speed At `speed=1` the distance", "import VGroup, VMobject from ..utils.bezier import interpolate, inverse_interpolate from ..utils.color", "mob The mobject to move along the vector field dt", "other default for direction? self.rgbs = np.array(list(map(color_to_rgb, colors))) def pos_to_rgb(pos:", "= None, min_color_scheme_value: float = 0, max_color_scheme_value: float = 2,", "= {} self.vector_config = vector_config self.func = func x_range =", "- 1) inter_alphas = scaled_alphas % 1 inter_alphas = inter_alphas.repeat(3).reshape((len(indices),", "class SpawningAndFlowingArea(Scene): def construct(self): func = lambda pos: np.sin(pos[0]) *", "func if color is None: self.single_color = False if color_scheme", "dt) + 1 if not self.single_color: self.background_img = self.get_colored_background_image() if", "True) vector_field.nudge(dot, -2, 60) circle.add_updater(vector_field.get_nudge_updater(pointwise=True)) dot.add_updater(vector_field.get_nudge_updater()) self.add(circle, dot) self.wait(6) \"\"\"", "and showing their trace. Parameters ---------- func The function defining", "\"\"\" k_1 = self.func(p) k_2 = self.func(p + step_size *", "displays the vector field. The color at each position is", "0.5) self.play(vector_field.animate.become(ArrowVectorField(func))) self.wait() Returns ------- `Callable[[np.ndarray], np.ndarray]` The scaled vector", "= ( max_run_time / (1 + self.time_width) * creation_staring_speed )", "import itertools as it import random from math import ceil,", ") -> \"VectorField\": \"\"\"Nudge a :class:`~.Mobject` along the vector field.", "Color from PIL import Image from .. import config from", "virtual_time The time the agents get to move in the", "field. 
The color at each position is calculated by passing", "= color self.submob_movement_updater = None @staticmethod def shift_func( func: Callable[[np.ndarray],", "class EndAnimation(Scene): def construct(self): func = lambda pos: np.sin(pos[0] /", "= self.ranges super().__init__( func, color, color_scheme, min_color_scheme_value, max_color_scheme_value, colors, **kwargs,", "+ step_size * (k_1 * 0.5)) k_3 = self.func(p +", "z_range: Sequence[float] = None, three_dimensions: bool = False, noise_factor: Optional[float]", "VMobject() line.duration = step * dt step = max(1, int(len(points)", "= x_array.reshape((1, len(x_array))) y_array = y_array.reshape((len(y_array), 1)) x_array = x_array.repeat(ph,", "run_time The run time of every single stream line creation.", "y_array.reshape((len(y_array), 1)) x_array = x_array.repeat(ph, axis=0) y_array.repeat(pw, axis=1) # TODO", "pos[1] * LEFT) - pos stream_lines = StreamLines( func, color=YELLOW,", ") self.length_func = length_func self.opacity = opacity if vector_config is", "---------- warm_up : bool, optional If `True` the animation is", "Mobject]: \"\"\"Get an update function to move a :class:`~.Mobject` along", "None, z_range: Sequence[float] = None, three_dimensions: bool = False, noise_factor:", "stream lines using an updater. The stream lines will continuously", "\"\"\"Shift a vector field function. Parameters ---------- func The function", "padding=1 ) spawning_area = Rectangle(width=6, height=4) flowing_area = Rectangle(width=8, height=6)", "= y_array.repeat(...)? points_array[:, :, 0] = x_array points_array[:, :, 1]", "VectorField This vector field. 
\"\"\" self.stop_submobject_movement() self.submob_movement_updater = lambda mob,", "`False` the vector field takes effect on the center of", "self.time_width = time_width def end_animation(self) -> AnimationGroup: \"\"\"End the stream", "1)) x_array = x_array.repeat(ph, axis=0) y_array.repeat(pw, axis=1) # TODO why", "np.zeros((ph, pw, 3)) x_array = np.linspace(-fw / 2, fw /", "result in removing the previous updater created by this method.", "stream line creation. run_time The run time of every single", "used with :meth:`~.Mobject.add_updater`, the mobject will move along the vector", "construct(self): func = lambda pos: ((pos[0] * UR + pos[1]", "VGroup, VMobject from ..utils.bezier import interpolate, inverse_interpolate from ..utils.color import", "return lambda p: func(p * scalar) def nudge( self, mob:", "ContinuousMotion class ContinuousMotion(Scene): def construct(self): func = lambda pos: np.sin(pos[0]", ") spawning_area = Rectangle(width=6, height=4) flowing_area = Rectangle(width=8, height=6) labels", "the vector field. Parameters ---------- func The function defining the", ":class:`VectorField` represented by a set of change vectors. Vector fields", "vect.set_color(self.color) else: vect.set_color(self.pos_to_color(point)) return vect class StreamLines(VectorField): \"\"\"StreamLines represent the", "2 / len(self.submobjects) animations = [ Create(line, run_time=run_time, **kwargs) for", "lag_ratio=lag_ratio) def start_animation( self, warm_up=True, flow_speed: float = 1, time_width:", "applied to the vector field. Returns ------- `Callable[[np.ndarray], np.ndarray]` The", "by passing the positing through a series of steps: Calculate", "from ..mobject.types.vectorized_mobject import VGroup, VMobject from ..utils.bezier import interpolate, inverse_interpolate", "return func class ArrowVectorField(VectorField): \"\"\"A :class:`VectorField` represented by a set", "False, automatically turns True if z_range is not None. 
length_func", "= x_array points_array[:, :, 1] = y_array rgbs = np.apply_along_axis(self.pos_to_rgb,", "When used with :meth:`~.Mobject.add_updater`, the mobject will move along the", "= ShowPassingFlash, **kwargs ) -> None: \"\"\"Animates the stream lines", "vector field uses a single color.\", ) ph = int(config[\"pixel_height\"]", "def construct(self): func = lambda pos: (pos[0] * UR +", "by it's magnitude. Other color schemes can be used however.", "/ 2 ) self.add(vector_field) circle = Circle(radius=2).shift(LEFT) self.add(circle.copy().set_color(GRAY)) dot =", "y_array.repeat(pw, axis=1) # TODO why not y_array = y_array.repeat(...)? points_array[:,", "if color is None: self.single_color = False if color_scheme is", "speed value scales the speed of this flow. time_width The", "single stream line creation. The runtime of the whole animation", "------- Image.Imgae The vector field image. \"\"\" if self.single_color: raise", "def updater(mob, dt): for line in mob.stream_lines: line.time += dt", "the vector field. substeps The amount of steps the whole", "self.wait() Returns ------- `Callable[[np.ndarray], np.ndarray]` The scaled vector field function.", ":meth:`nudge` for details. Returns ------- Callable[[Mobject, float], Mobject] The update", "self.stream_lines: run_time = line.duration / flow_speed line.anim = line_animation_class( line,", "automatically turns True if z_range is not None. length_func The", "+= [self.z_range] else: self.ranges += [[0, 0]] for i in", ") self.noise_factor = ( noise_factor if noise_factor is not None", "pointwise Whether to move the mobject along the vector field.", "UpdateFromAlphaFunc from ..constants import OUT, RIGHT, UP from ..mobject.geometry import", "self.add(ArrowVectorField(func)) .. 
manim:: SizingAndSpacing class SizingAndSpacing(Scene): def construct(self): func =", "+ self.padding - self.y_range[2] or p[2] < self.z_range[0] - self.padding", "= max_steps if not step: continue if config[\"renderer\"] == \"opengl\":", "ease_out_sine creation_staring_speed = creation_rate_func(0.001) * 1000 creation_run_time = ( max_run_time", "self def get_nudge_updater( self, speed: float = 1, pointwise: bool", "] self.ranges = [self.x_range, self.y_range] if three_dimensions or z_range: self.z_range", "lines will continuously flow Parameters ---------- warm_up : bool, optional", "] random.shuffle(animations) return AnimationGroup(*animations, lag_ratio=lag_ratio) def start_animation( self, warm_up=True, flow_speed:", "used as display size for the vector. By default this", "- self.padding or p[2] > self.z_range[1] + self.padding - self.z_range[2]", "size of vectors to reduce the clutter. opacity The opacity", "func = lambda pos: (pos[0] * UR + pos[1] *", "the change in position of a point along a vector", "\"\"\"The creation animation of the stream lines. The stream lines", "* 0.5)) k_3 = self.func(p + step_size * (k_2 *", "of this functions is displayed by moving many agents along", "\"\"\" rgbs = np.array([color_to_rgb(c) for c in colors]) def func(values,", "self.y_range = y_range or [ floor(-config[\"frame_height\"] / 2), ceil(config[\"frame_height\"] /", "along the vector field. See :meth:`nudge` for details. Returns -------", "of colors to generate the gradient Returns ------- function to", "def construct(self): func = lambda pos: ((pos[0] * UR +", "StreamLines( func, stroke_width=3, max_anchors_per_line=5, virtual_time=1, color=BLUE ) self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5,", "potentially distorting it. Returns ------- VectorField This vector field. 
Examples", "= run_time / 2 / len(self.submobjects) animations = [ Create(line,", "of the color_scheme function to be mapped to the first", "scaled_alphas % 1 inter_alphas = inter_alphas.repeat(3).reshape((len(indices), 3)) result = interpolate(rgbs[indices],", ".. manim:: EndAnimation class EndAnimation(Scene): def construct(self): func = lambda", "sampling_rate) pw = int(config[\"pixel_width\"] / sampling_rate) fw = config[\"frame_width\"] fh", ") -> None: \"\"\"Animates the stream lines using an updater.", "max_color_scheme_value, colors, **kwargs, ) self.noise_factor = ( noise_factor if noise_factor", "the color_scheme function to be mapped to the first color", ") line.anim.begin() line.time = random.random() * self.virtual_time if warm_up: line.time", "gives the position in the color gradient defined using `min_color_scheme_value`,", "1)] alpha %= 1 return interpolate(c1, c2, alpha) self.pos_to_rgb =", "vector field. substeps The amount of steps the whole nudge", "Iterable, ): \"\"\" Generates a gradient of rgbas as a", "of change vectors. Vector fields are always based on a", "position of the `VectorField`. color The color of the vector", "buff=0.05) self.add(stream_lines, spawning_area, flowing_area, *labels) \"\"\" def __init__( self, func:", "< self.x_range[0] - self.padding or p[0] > self.x_range[1] + self.padding", "self.ranges += [self.z_range] else: self.ranges += [[0, 0]] for i", "a series of steps: Calculate the vector field function at", "range(substeps): if pointwise: mob.apply_function(lambda p: p + runge_kutta(self, p, step_size))", "/ 2) * LEFT vf = ArrowVectorField(func, x_range=[-7, 7, 1])", "**kwargs) for line in self.stream_lines ] random.shuffle(animations) return AnimationGroup(*animations, lag_ratio=lag_ratio)", "in mob.stream_lines: line.time += dt * flow_speed if line.time >=", "field takes effect on the center of the given :class:`~.Mobject`.", "This vector field. 
\"\"\" self.stop_submobject_movement() self.submob_movement_updater = lambda mob, dt:", "from colour import Color from PIL import Image from ..", "/ 2, pw) y_array = np.linspace(fh / 2, -fh /", "mob.set_stroke(opacity=1) def finish_updater_cycle(line, alpha): line.time += dt * self.flow_speed line.anim.interpolate(min(line.time", "lines appear in random order. Parameters ---------- lag_ratio The lag", "), create, ), ) return AnimationGroup(*animations) # TODO: Variant of", "nudge( self, mob: Mobject, dt: float = 1, substeps: int", "axis=0) y_array.repeat(pw, axis=1) # TODO why not y_array = y_array.repeat(...)?", "map that vector to a single value using `self.color_scheme` and", "y_min, y_max, delta_y z_range A sequence of z_min, z_max, delta_z", "results, but may take a long time to compute. Returns", "gradients as numpy arrays representing rgba values \"\"\" rgbs =", "x_range: Sequence[float] = None, y_range: Sequence[float] = None, z_range: Sequence[float]", "\"VectorField\", \"ArrowVectorField\", \"StreamLines\", ] import itertools as it import random", "the vector field along its path. The speed value scales", "pos[1] * LEFT) - pos) / 3 self.add(ArrowVectorField(func)) .. manim::", "stretched. Lower values result in a better approximation of the", ") line.set_rgba_array_direct( self.values_to_rgbas(norms, opacity), name=\"stroke_rgba\", ) else: if np.any(self.z_range !=", "float]: vec = self.func(pos) color_value = np.clip( self.color_scheme(vec), min_color_scheme_value, max_color_scheme_value,", "= 1, pointwise: bool = False, ) -> Callable[[Mobject, float],", "VectorField This vector field. Examples -------- .. manim:: Nudging class", "UR + np.cos(pos[1] / 2) * LEFT vf = ArrowVectorField(func,", "for x in np.arange(*self.x_range) for y in np.arange(*self.y_range) for z", "function. \"\"\" return lambda mob, dt: self.nudge(mob, dt * speed,", "speed value scales the speed of such a mobject. 
pointwise", "by default not include any visible elements but provides methods", "for line in self.stream_lines: run_time = line.duration / flow_speed line.anim", "time_width The proportion of the stream line shown while being", "passing the positing through a series of steps: Calculate the", "passed to the :class:`~.VGroup` constructor Examples -------- .. manim:: BasicUsage", "def end_animation(self) -> AnimationGroup: \"\"\"End the stream line animation smoothly.", "line in self.stream_lines: run_time = line.duration / flow_speed line.anim =", "+ (z - half_noise) * OUT + self.noise_factor * np.random.random(3)", "\"\"\" return lambda mob, dt: self.nudge(mob, dt * speed, pointwise=pointwise)", "the agents get to move in the vector field. Higher", "..utils.deprecation import deprecated_params from ..utils.rate_functions import ease_out_sine, linear from ..utils.simple_functions", "move along the vector field dt A scalar to the", "/ 2) * UP vector_field = ArrowVectorField( func, x_range=[-7, 7,", "will be used as display size for the vector. By", "# uses virtual_time as run_time self.wait() \"\"\" if run_time is", "run_time self.wait() \"\"\" if run_time is None: run_time = self.virtual_time", "give more accurate results, but may take a long time", "= np.apply_along_axis(self.pos_to_rgb, 2, points_array) return Image.fromarray((rgbs * 255).astype(\"uint8\")) def get_vectorized_rgba_gradient_function(", "every position of the `VectorField`. 
color The color of the", "UpdateFromAlphaFunc( line, finish_updater_cycle, run_time=remaining_time, ), create, ), ) return AnimationGroup(*animations)", "[RED, YELLOW, BLUE, DARK_GRAY] min_radius = Circle(radius=2, color=colors[0]).shift(LEFT * 5)", "+ z * OUT)) self.set_opacity(self.opacity) def get_vector(self, point: np.ndarray): \"\"\"Creates", "delta_y z_range A sequence of z_min, z_max, delta_z three_dimensions Enables", "np.sin(pos[1]) * RIGHT + np.cos(pos[0]) * UP vector_field = ArrowVectorField(func)", "height=4) flowing_area = Rectangle(width=8, height=6) labels = [Tex(\"Spawning Area\"), Tex(\"Flowing", "to reduce the clutter. opacity The opacity of the arrows.", "sampling_rate: int = 5) -> Image.Image: \"\"\"Generate an image that", "max_color_scheme_value=10, colors=colors ) self.add(vf, min_radius, max_radius) \"\"\" def __init__( self,", "* (k_2 * 0.5)) k_4 = self.func(p + step_size *", "nudge_submobjects( self, dt: float = 1, substeps: int = 1,", "vector field. The color at each position is calculated by", "if color_scheme is None: def color_scheme(p): return np.linalg.norm(p) self.color_scheme =", "+ np.cos(pos[1] / 2) * LEFT stream_lines = StreamLines( func,", "opacity)), axis=1, ) return result return func class ArrowVectorField(VectorField): \"\"\"A", "self.virtual_time if lag_ratio is None: lag_ratio = run_time / 2", "line.time += dt * self.flow_speed line.anim.interpolate(min(line.time / line.anim.run_time, 1)) if", "mob.apply_function(lambda p: p + runge_kutta(self, p, step_size)) else: mob.shift(runge_kutta(self, mob.get_center(),", "of a :class:`VectorField` using the trace of moving agents. 
Vector", "the :class:`~.Vector` constructor \"\"\" output = np.array(self.func(point)) norm = np.linalg.norm(output)", "Sequence[float] = None, three_dimensions: bool = False, # Automatically True", "for lbl in labels: lbl.add_background_rectangle(opacity=0.6, buff=0.05) self.add(stream_lines, spawning_area, flowing_area, *labels)", "for point in line.points], ) line.set_rgba_array_direct( self.values_to_rgbas(norms, opacity), name=\"stroke_rgba\", )", "2, ph) x_array = x_array.reshape((1, len(x_array))) y_array = y_array.reshape((len(y_array), 1))", "of the vector field. color The color of the vector", "self.flow_speed = flow_speed self.time_width = time_width def end_animation(self) -> AnimationGroup:", "2, colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, # Determining stream line starting", "the rate of change at every position of the vector", "(1 + self.time_width) * creation_staring_speed ) # creation_run_time is calculated", "/ config[\"frame_rate\"] animations = [] self.remove_updater(self.flow_animation) self.flow_animation = None for", "be longer due to the `lag_ratio`. If undefined, the virtual", "p: func(p * scalar) def nudge( self, mob: Mobject, dt:", "y_range A sequence of y_min, y_max, delta_y z_range A sequence", "where its speed is determined by the magnitude of the", "np.arange(*self.z_range) ], ) def outside_box(p): return ( p[0] < self.x_range[0]", "lambda norm: 0.45 * sigmoid(norm), opacity: float = 1.0, vector_config:", "), ) self.remove(line.anim.mobject) line.anim.finish() else: remaining_time = max_run_time - line.time", "line.time / self.flow_speed animations.append( Succession( UpdateFromAlphaFunc( line, finish_updater_cycle, run_time=remaining_time, ),", "animation before fading it out.\") def hide_and_wait(mob, alpha): if alpha", "Iterable, Optional, Sequence, Tuple, Type import numpy as np from", "How much the point is shifted. \"\"\" k_1 = self.func(p)", "the vector field. 
x_range A sequence of x_min, x_max, delta_x", "so that the total animation length is 1.5 times the", "from .. import config from ..animation.composition import AnimationGroup, Succession from", "accurate approximations. pointwise Whether to move the mobject along the", "approximations. pointwise Whether to move the mobject along the vector", "this method multiple times will result in removing the previous", "\"\"\"Apply a nudge along the vector field to all submobjects.", "return result return func class ArrowVectorField(VectorField): \"\"\"A :class:`VectorField` represented by", ") self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5, time_width=0.5) self.wait(1) self.play(stream_lines.end_animation()) \"\"\" if self.flow_animation", "The opacity of the arrows. vector_config Additional arguments to be", "= [Tex(\"Spawning Area\"), Tex(\"Flowing Area\").shift(DOWN * 2.5)] for lbl in", "this is used to cap the displayed size of vectors", "self.nudge(mob, dt, substeps, pointwise) return self def get_nudge_updater( self, speed:", "stroke_width=3, max_anchors_per_line=30) self.add(stream_lines) stream_lines.start_animation(warm_up=False, flow_speed=1.5) self.wait(stream_lines.virtual_time / stream_lines.flow_speed) \"\"\" for", "that the creation animation starts at the same speed #", "`colors`. Higher values also result in the last color of", "vector in the vector field. The created vector is based", "*= -1 self.add(line.anim.mobject) def updater(mob, dt): for line in mob.stream_lines:", "DEFAULT_SCALAR_FIELD_COLORS: list = [BLUE_E, GREEN, YELLOW, RED] class VectorField(VGroup): \"\"\"A", "can be used however. 
Parameters ---------- func The function defining", "max_run_time / (1 + self.time_width) * creation_staring_speed ) # creation_run_time", "len(x_array))) y_array = y_array.reshape((len(y_array), 1)) x_array = x_array.repeat(ph, axis=0) y_array.repeat(pw,", "self.wait(6) \"\"\" def runge_kutta(self, p: Sequence[float], step_size: float) -> float:", "return self def get_colored_background_image(self, sampling_rate: int = 5) -> Image.Image:", "np.ndarray]: \"\"\"Shift a vector field function. Parameters ---------- func The", "each point being moved along the vector field. step_size A", "interpolation at :func:`~.inverse_interpolate` colors list of colors to generate the", "Lower values give more accurate results, but may take a", "/ 3 self.add(ArrowVectorField(func)) .. manim:: SizingAndSpacing class SizingAndSpacing(Scene): def construct(self):", "in line.get_anchors()], ) else: line.color_using_background_image(self.background_img) line.set_stroke(width=self.stroke_width, opacity=opacity) self.add(line) self.stream_lines =", "first color in `colors`. Lower values also result in the", "pos: np.sin(pos[1] / 2) * RIGHT + np.cos(pos[0] / 2)", "LEFT) - pos) / 3 self.add(ArrowVectorField(func)) .. manim:: SizingAndSpacing class", "**kwargs ): super().__init__(**kwargs) self.func = func if color is None:", "time of each stream line creation. 
run_time The run time", "func = lambda pos: np.sin(pos[0] / 2) * UR +", "import deprecated_params from ..utils.rate_functions import ease_out_sine, linear from ..utils.simple_functions import", "self.color = color self.submob_movement_updater = None @staticmethod def shift_func( func:", "rgbas as a numpy array Parameters ---------- start start value", "np.array([color_to_rgb(c) for c in colors]) def func(values, opacity=1): alphas =", "self.z_range = z_range or self.y_range.copy() self.ranges += [self.z_range] else: self.ranges", "= None, **kwargs ) -> AnimationGroup: \"\"\"The creation animation of", "\"\"\"Generate an image that displays the vector field. The color", "stream line starting positions: x_range: Sequence[float] = None, y_range: Sequence[float]", "2.0 * k_3 + k_4) step_size = dt / substeps", "The actual distance is based on the magnitude of the", "function defining a vector field. shift_vector The shift to be", "self.padding = padding self.stroke_width = stroke_width half_noise = self.noise_factor /", "2.0 * k_2 + 2.0 * k_3 + k_4) step_size", "will result in removing the previous updater created by this", "the point is shifted. 
\"\"\" k_1 = self.func(p) k_2 =", "**kwargs ): self.x_range = x_range or [ floor(-config[\"frame_width\"] / 2),", "2 np.random.seed(0) start_points = np.array( [ (x - half_noise) *", "self.virtual_time if warm_up: line.time *= -1 self.add(line.anim.mobject) def updater(mob, dt):", "three_dimensions: bool = False, # Automatically True if z_range is", "continue if config[\"renderer\"] == \"opengl\": line = OpenGLVMobject() else: line", "= DEFAULT_SCALAR_FIELD_COLORS, # Determining stream line starting positions: x_range: Sequence[float]", "include any visible elements but provides methods to move other", "self.z_range[2] ) max_steps = ceil(virtual_time / dt) + 1 if", "Create(line, run_time=run_time, **kwargs) for line in self.stream_lines ] random.shuffle(animations) return", "the gradients as numpy arrays representing rgba values \"\"\" rgbs", "Defaults to :code:`delta_y / 2` if not defined. n_repeats The", "a numpy array Parameters ---------- start start value used for", "point along a vector field. Parameters ---------- p The position", "If `False` the vector field takes effect on the center", "a nudge along the vector field to all submobjects. Parameters", "stream lines. opacity The opacity of the stream lines. Examples", "A scalar that is used to determine how much a", "self.padding or p[2] > self.z_range[1] + self.padding - self.z_range[2] )", "its path. The speed value scales the speed of such", "stream line flashing line_animation_class The animation class being used Examples", "alpha): if alpha == 0: mob.set_stroke(opacity=0) elif alpha == 1:", "defining a vector at every position. This class does by", "mob, dt: mob.nudge_submobjects( dt * speed, pointwise=pointwise, ) self.add_updater(self.submob_movement_updater) return", "y in np.arange(*self.y_range) for z in np.arange(*self.z_range) ], ) def", "shown while being animated rate_func The rate function of each", "a color from that value using the color gradient. Parameters", "given point. 
Color and length fit the specifications of this", "performance with fewer anchors ) self.play(stream_lines.create()) # uses virtual_time as", "the vector. kwargs : Any Additional arguments to be passed", "vector field. Parameters ---------- mob The mobject to move along", "min_color_scheme_value, max_color_scheme_value, colors, ) for point in start_points: points =", "self.y_range.copy() self.ranges += [self.z_range] else: self.ranges += [[0, 0]] for", "y_range=[-4, 4, 1], length_func=lambda x: x / 2 ) self.add(vector_field)", "0.5)) k_3 = self.func(p + step_size * (k_2 * 0.5))", "z_range: self.z_range = z_range or self.y_range.copy() self.ranges += [self.z_range] else:", "\"\"\" output = np.array(self.func(point)) norm = np.linalg.norm(output) if norm !=", "that displays the vector field. The color at each position", "position. This class does by default not include any visible", "np.clip(alphas, 0, 1) scaled_alphas = alphas * (len(rgbs) - 1)", "self.padding - self.z_range[2] ) max_steps = ceil(virtual_time / dt) +", "mobject is moved along the vector field. The actual distance", "self.add(StreamLines(func)) .. manim:: SpawningAndFlowingArea :save_last_frame: class SpawningAndFlowingArea(Scene): def construct(self): func", "_ in range(substeps): if pointwise: mob.apply_function(lambda p: p + runge_kutta(self,", "= DEFAULT_SCALAR_FIELD_COLORS, # Determining Vector positions: x_range: Sequence[float] = None,", "<= 0: animations.append( Succession( UpdateFromAlphaFunc( line, hide_and_wait, run_time=-line.time / self.flow_speed,", "-> None: \"\"\"Animates the stream lines using an updater. The", ") else: line.color_using_background_image(self.background_img) line.set_stroke(width=self.stroke_width, opacity=opacity) self.add(line) self.stream_lines = [*self.submobjects] def", "trajectories in the vector field. 
virtual_time The time the agents", "): self.x_range = x_range or [ floor(-config[\"frame_width\"] / 2), ceil(config[\"frame_width\"]", "animation of the stream lines. Examples -------- .. manim:: StreamLineCreation", "values result in a better approximation of the trajectories in", "vector. kwargs : Any Additional arguments to be passed to", "vector field along its path. The speed value scales the", "= None, **kwargs ): self.x_range = x_range or [ floor(-config[\"frame_width\"]", "self.wait() length_func = lambda x: x / 3 vf2 =", "Mobject] The update function. \"\"\" return lambda mob, dt: self.nudge(mob,", "the vector field. Calling this method multiple times will result", "vector field image. \"\"\" if self.single_color: raise ValueError( \"There is", "run_time=run_time, **kwargs) for line in self.stream_lines ] random.shuffle(animations) return AnimationGroup(*animations,", "random.shuffle(animations) return AnimationGroup(*animations, lag_ratio=lag_ratio) def start_animation( self, warm_up=True, flow_speed: float", "\"\"\"End the stream line animation smoothly. Returns an animation resulting", "[self.z_range] else: self.ranges += [[0, 0]] for i in range(len(self.ranges)):", "trace. Parameters ---------- func The function defining the rate of", "import ease_out_sine, linear from ..utils.simple_functions import sigmoid from .types.opengl_vectorized_mobject import", "in the image. Lower values give more accurate results, but", "mob, dt: self.nudge(mob, dt * speed, pointwise=pointwise) def start_submobject_movement( self,", "speed at which to move the submobjects. 
See :meth:`get_nudge_updater` for", "= length_func self.opacity = opacity if vector_config is None: vector_config", "# creation_run_time is calculated so that the creation animation starts", "run_time=run_time, rate_func=rate_func, time_width=time_width, **kwargs, ) line.anim.begin() line.time = random.random() *", "single color.\", ) ph = int(config[\"pixel_height\"] / sampling_rate) pw =", "..animation.composition import AnimationGroup, Succession from ..animation.creation import Create from ..animation.indication", "grid of vectors. By default the color of each vector", "a better approximation of the trajectories in the vector field.", "functions is displayed as a grid of vectors. By default", "are always based on a function defining the :class:`~.Vector` at", "else: line = VMobject() line.duration = step * dt step", "long time to compute. Returns ------- Image.Imgae The vector field", "vector field. Vector fields are based on a function defining", "for the vector. By default this is used to cap", "* (k_1 * 0.5)) k_3 = self.func(p + step_size *", "change at every position of the `VectorField`. color The color", "Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, # Determining Vector positions: x_range: Sequence[float] =", "Lower values also result in the first color of the", "This value gives the position in the color gradient defined", "self.vector_config = vector_config self.func = func x_range = np.arange(*self.x_range) y_range", "passed, the returned value will be used as display size", "\"\"\"Nudge a :class:`~.Mobject` along the vector field. Parameters ---------- mob", ".. 
manim:: SizingAndSpacing class SizingAndSpacing(Scene): def construct(self): func = lambda", "0, 1) scaled_alphas = alphas * (len(rgbs) - 1) indices", "SizingAndSpacing class SizingAndSpacing(Scene): def construct(self): func = lambda pos: np.sin(pos[0]", "alphas = np.clip(alphas, 0, 1) scaled_alphas = alphas * (len(rgbs)", "p: func(p - shift_vector) @staticmethod def scale_func( func: Callable[[np.ndarray], np.ndarray],", "max(1, int(len(points) / self.max_anchors_per_line)) line.set_points_smoothly(points[::step]) if self.single_color: line.set_stroke(self.color) else: if", "gradient of the vector field. kwargs : Any Additional arguments", "used for inverse interpolation at :func:`~.inverse_interpolate` colors list of colors", "function to be mapped to the last color in `colors`.", "self.remove(line.anim.mobject) line.anim.finish() max_run_time = self.virtual_time / self.flow_speed creation_rate_func = ease_out_sine", "pointwise=pointwise) def start_submobject_movement( self, speed: float = 1, pointwise: bool", "this flow. time_width The proportion of the stream line shown", "class Nudging(Scene): def construct(self): func = lambda pos: np.sin(pos[1] /", "being animated rate_func The rate function of each stream line", "labels: lbl.add_background_rectangle(opacity=0.6, buff=0.05) self.add(stream_lines, spawning_area, flowing_area, *labels) \"\"\" def __init__(", "as the regular line flash animation but eases out. dt", "def outside_box(p): return ( p[0] < self.x_range[0] - self.padding or", "0, max_color_scheme_value: float = 2, colors: Sequence[Color] = DEFAULT_SCALAR_FIELD_COLORS, **kwargs", "Optional[float] = None, n_repeats=1, # Determining how lines are drawn", "but provides methods to move other :class:`~.Mobject` s along the", "not in length. 
padding The distance agents can move out", "amount by which the starting position of each agent is", "start value used for inverse interpolation at :func:`~.inverse_interpolate` end end", "float: \"\"\"Returns the change in position of a point along", "Color and length fit the specifications of this vector field.", "else self.y_range[2] / 2 ) self.n_repeats = n_repeats self.virtual_time =", "coloring is disabled. color_scheme A function mapping a vector to", "= 1, time_width: float = 0.3, rate_func: Callable[[float], float] =", "stop_submobject_movement(self) -> \"VectorField\": \"\"\"Stops the continuous movement started using :meth:`start_submobject_movement`.", "- half_noise) * OUT + self.noise_factor * np.random.random(3) for n", "scalar to the amount the mobject is moved along the", "A sequence of x_min, x_max, delta_x y_range A sequence of", "with cairo line.set_stroke(width=self.stroke_width / 4.0) norms = np.array( [np.linalg.norm(self.func(point)) for", "vect.shift(point) if self.single_color: vect.set_color(self.color) else: vect.set_color(self.pos_to_color(point)) return vect class StreamLines(VectorField):", "- line.time / self.flow_speed animations.append( Succession( UpdateFromAlphaFunc( line, finish_updater_cycle, run_time=remaining_time,", "# TODO why not y_array = y_array.repeat(...)? points_array[:, :, 0]", "i in range(len(self.ranges)): if len(self.ranges[i]) == 2: self.ranges[i] += [0.5]", "__init__( self, func: Callable[[np.ndarray], np.ndarray], color: Optional[Color] = None, color_scheme:", "self.length_func = length_func self.opacity = opacity if vector_config is None:", "will continuously flow Parameters ---------- warm_up : bool, optional If", "animation is running Examples -------- .. 
manim:: EndAnimation class EndAnimation(Scene):", "func(p * scalar) def nudge( self, mob: Mobject, dt: float", "z_range: Sequence[float] = None, three_dimensions: bool = False, # Automatically", "that vector to a single value using `self.color_scheme` and finally", "ArrowVectorField(func, x_range=[-7, 7, 1]) self.add(vf) self.wait() length_func = lambda x:", "move a :class:`~.Mobject` along the vector field. When used with", "UP + z * OUT)) self.set_opacity(self.opacity) def get_vector(self, point: np.ndarray):", "colors=colors ) self.add(vf, min_radius, max_radius) \"\"\" def __init__( self, func:", "maximum number of anchors per line. Lines with more anchors", "values also result in the first color of the gradient.", "not None. length_func The function determining the displayed size of", "* 2.5)] for lbl in labels: lbl.add_background_rectangle(opacity=0.6, buff=0.05) self.add(stream_lines, spawning_area,", "rooted in the given point. Color and length fit the", "in np.arange(*self.z_range) ], ) def outside_box(p): return ( p[0] <", "gradient. max_color_scheme_value The value of the color_scheme function to be", "a vector field function. Parameters ---------- func The function defining", "= np.linspace(-fw / 2, fw / 2, pw) y_array =", "remaining_time = max_run_time - line.time / self.flow_speed animations.append( Succession( UpdateFromAlphaFunc(", "the submobjects. See :meth:`get_nudge_updater` for details. pointwise Whether to move", "= np.clip( self.color_scheme(vec), min_color_scheme_value, max_color_scheme_value, ) alpha = inverse_interpolate( min_color_scheme_value,", ":code:`delta_y / 2` if not defined. n_repeats The number of", "2 ) self.add(vector_field) circle = Circle(radius=2).shift(LEFT) self.add(circle.copy().set_color(GRAY)) dot = Dot().move_to(circle)", "end end value used for inverse interpolation at :func:`~.inverse_interpolate` colors", "the vector field. 
color The color of the vector field.", "Optional[Color] = None, color_scheme: Optional[Callable[[np.ndarray], float]] = None, min_color_scheme_value: float", "of the individual points of the :class:`~.Mobject`, potentially distorting it.", "if config[\"renderer\"] == \"opengl\": line = OpenGLVMobject() else: line =", "float]] = None, **kwargs ) -> AnimationGroup: \"\"\"The creation animation", "hide_and_wait(mob, alpha): if alpha == 0: mob.set_stroke(opacity=0) elif alpha ==", "------- `Callable[[np.ndarray], np.ndarray]` The shifted vector field function. \"\"\" return", "that value using the color gradient. Parameters ---------- sampling_rate The", "float, ) -> Callable[[np.ndarray], np.ndarray]: \"\"\"Scale a vector field function.", "function to generate the gradients as numpy arrays representing rgba", "in position of a point along a vector field. Parameters", "Parameters ---------- speed At `speed=1` the distance a mobject moves", "= line_animation_class( line, run_time=run_time, rate_func=rate_func, time_width=time_width, **kwargs, ) line.anim.begin() line.time", "also result in the first color of the gradient. max_color_scheme_value", "alpha %= 1 return interpolate(c1, c2, alpha) self.pos_to_rgb = pos_to_rgb", "line in self.stream_lines ] random.shuffle(animations) return AnimationGroup(*animations, lag_ratio=lag_ratio) def start_animation(", "line creation. run_time The run time of every single stream", "color_value = np.clip( self.color_scheme(vec), min_color_scheme_value, max_color_scheme_value, ) alpha = inverse_interpolate(", "starts at the same speed # as the regular line", "gradient Returns ------- function to generate the gradients as numpy", "step = max_steps if not step: continue if config[\"renderer\"] ==", "creation animation starts at the same speed # as the", "Whether to move the mobject along the vector field. 
If", "series of steps: Calculate the vector field function at that", "1], length_func=length_func) self.play(vf.animate.become(vf2)) self.wait() .. manim:: Coloring :save_last_frame: class Coloring(Scene):", "to move other :class:`~.Mobject` s along the vector field. Parameters", "= inverse_interpolate(start, end, np.array(values)) alphas = np.clip(alphas, 0, 1) scaled_alphas", "Callable[[Mobject, float], Mobject] The update function. \"\"\" return lambda mob,", "func = lambda pos: np.sin(pos[1]) * RIGHT + np.cos(pos[0]) *", "size of the vectors. The actual size of the vector", "float = 1, substeps: int = 1, pointwise: bool =", "the trace of moving agents. Vector fields are always based", "lines. opacity The opacity of the stream lines. Examples --------", "None, z_range: Sequence[float] = None, three_dimensions: bool = False, #", "* 1000 creation_run_time = ( max_run_time / (1 + self.time_width)", ".. manim:: ScaleVectorFieldFunction class ScaleVectorFieldFunction(Scene): def construct(self): func = lambda", "z_range A sequence of z_min, z_max, delta_z three_dimensions Enables three_dimensions.", "each stream line flashing line_animation_class The animation class being used", "= linear, line_animation_class: Type[ShowPassingFlash] = ShowPassingFlash, **kwargs ) -> None:", "= [ Create(line, run_time=run_time, **kwargs) for line in self.stream_lines ]", "fields.\"\"\" __all__ = [ \"VectorField\", \"ArrowVectorField\", \"StreamLines\", ] import itertools", "x_array.repeat(ph, axis=0) y_array.repeat(pw, axis=1) # TODO why not y_array =", "Callable[[np.ndarray], np.ndarray], color: Optional[Color] = None, color_scheme: Optional[Callable[[np.ndarray], float]] =", "= max(1, int(len(points) / self.max_anchors_per_line)) line.set_points_smoothly(points[::step]) if self.single_color: line.set_stroke(self.color) else:", "self.opacity = opacity if vector_config is None: vector_config = {}", "Image.Image: \"\"\"Generate an image that displays the vector field. 
The", "the center of the given :class:`~.Mobject`. If `True` the vector", "out. dt = 1 / config[\"frame_rate\"] animations = [] self.remove_updater(self.flow_animation)", "based on a function defining the :class:`~.Vector` at every position.", "many agents along the vector field and showing their trace.", "step_size * k_3) return step_size / 6.0 * (k_1 +", "self.add(line.anim.mobject) def updater(mob, dt): for line in mob.stream_lines: line.time +=", "len(rgbs) - 1) inter_alphas = scaled_alphas % 1 inter_alphas =", "in `colors`. Higher values also result in the last color", "[0.5] self.ranges[i][1] += self.ranges[i][2] self.x_range, self.y_range, self.z_range = self.ranges super().__init__(", "is rooted in the given point. Color and length fit", "------- function to generate the gradients as numpy arrays representing", "flow_speed line.anim = line_animation_class( line, run_time=run_time, rate_func=rate_func, time_width=time_width, **kwargs, )", "dot = Dot().move_to(circle) vector_field.nudge(circle, -2, 60, True) vector_field.nudge(dot, -2, 60)", "self.flow_speed creation_rate_func = ease_out_sine creation_staring_speed = creation_rate_func(0.001) * 1000 creation_run_time", "OpenGLVMobject DEFAULT_SCALAR_FIELD_COLORS: list = [BLUE_E, GREEN, YELLOW, RED] class VectorField(VGroup):", "based on the function of the vector field and is", "values therefore result in longer stream lines. However, this whole", "self.add_updater(updater) self.flow_animation = updater self.flow_speed = flow_speed self.time_width = time_width", "/ 2 ) self.n_repeats = n_repeats self.virtual_time = virtual_time self.max_anchors_per_line", "------- float How much the point is shifted. 
\"\"\" k_1", "speed, pointwise=pointwise, ) self.add_updater(self.submob_movement_updater) return self def stop_submobject_movement(self) -> \"VectorField\":", "equal to the magnitude of the vector field along its", "or [ floor(-config[\"frame_height\"] / 2), ceil(config[\"frame_height\"] / 2), ] self.ranges", "approximation of the trajectories in the vector field. virtual_time The", "vector field dt A scalar to the amount the mobject", "at every position of the `VectorField`. color The color of", "if line.time <= 0: animations.append( Succession( UpdateFromAlphaFunc( line, hide_and_wait, run_time=-line.time", "== \"opengl\": self.values_to_rgbas = self.get_vectorized_rgba_gradient_function( min_color_scheme_value, max_color_scheme_value, colors, ) for", "self.ranges[i][1] += self.ranges[i][2] self.x_range, self.y_range, self.z_range = self.ranges super().__init__( func,", "starting point. dt The factor by which the distance an", "\"VectorField\": \"\"\"Start continuously moving all submobjects along the vector field.", "None: vector_config = {} self.vector_config = vector_config self.func = func", "half_noise) * OUT + self.noise_factor * np.random.random(3) for n in", "Other color schemes can be used however. Parameters ---------- func", "the virtual time of the stream lines is used as", "vector field. Examples -------- .. manim:: ScaleVectorFieldFunction class ScaleVectorFieldFunction(Scene): def", "to cap the displayed size of vectors to reduce the", "Callable, Iterable, Optional, Sequence, Tuple, Type import numpy as np", "representing rgba values \"\"\" rgbs = np.array([color_to_rgb(c) for c in", "/ line.anim.run_time, 1)) if alpha == 1: self.remove(line.anim.mobject) line.anim.finish() max_run_time", "None, **kwargs ): self.x_range = x_range or [ floor(-config[\"frame_width\"] /", "Succession( UpdateFromAlphaFunc( line, finish_updater_cycle, run_time=remaining_time, ), create, ), ) return", "shifted. 
\"\"\" k_1 = self.func(p) k_2 = self.func(p + step_size", "run time. Returns ------- :class:`~.AnimationGroup` The creation animation of the", "ph) x_array = x_array.reshape((1, len(x_array))) y_array = y_array.reshape((len(y_array), 1)) x_array", "of this functions is displayed as a grid of vectors.", "- half_noise) * RIGHT + (y - half_noise) * UP", "float, colors: Iterable, ): \"\"\" Generates a gradient of rgbas", "of rgbas as a numpy array Parameters ---------- start start", "self.padding - self.y_range[2] or p[2] < self.z_range[0] - self.padding or", "lag ratio of the animation. If undefined, it will be", "start_animation( self, warm_up=True, flow_speed: float = 1, time_width: float =", "= last_point + dt * func(last_point) if outside_box(new_point): break points.append(new_point)", "The created vector is based on the function of the", "of moving agents. Vector fields are always based on a", "import sigmoid from .types.opengl_vectorized_mobject import OpenGLVMobject DEFAULT_SCALAR_FIELD_COLORS: list = [BLUE_E,", "vector field function. Parameters ---------- func The function defining a", "with all lines shown. flow_speed At `flow_speed=1` the distance the", "min_color_scheme_value: float = 0, max_color_scheme_value: float = 2, colors: Sequence[Color]", "field. Parameters ---------- mob The mobject to move along the", "color: Optional[Color] = None, color_scheme: Optional[Callable[[np.ndarray], float]] = None, min_color_scheme_value:", "is used to determine how much a point is shifted", "is calculated by passing the positing through a series of", "the specifications of this vector field. Parameters ---------- point The", "Whether to move the mobject along the vector field. See", "self.stroke_width = stroke_width half_noise = self.noise_factor / 2 np.random.seed(0) start_points", "function defining the vector at every position. 
The values of", "if pointwise: mob.apply_function(lambda p: p + runge_kutta(self, p, step_size)) else:", "default not include any visible elements but provides methods to", "to start the animation before fading it out.\") def hide_and_wait(mob,", "__all__ = [ \"VectorField\", \"ArrowVectorField\", \"StreamLines\", ] import itertools as", "= ArrowVectorField( func, min_color_scheme_value=2, max_color_scheme_value=10, colors=colors ) self.add(vf, min_radius, max_radius)", "config[\"renderer\"] == \"opengl\": line = OpenGLVMobject() else: line = VMobject()", "shifted vector field function. \"\"\" return lambda p: func(p -", "(k_1 + 2.0 * k_2 + 2.0 * k_3 +", "np.linspace(-fw / 2, fw / 2, pw) y_array = np.linspace(fh", "trace of moving agents. Vector fields are always based on", "StreamLines( func, color=YELLOW, x_range=[-7, 7, 1], y_range=[-4, 4, 1], stroke_width=3,", "of y_min, y_max, delta_y z_range A sequence of z_min, z_max,", "`speed=1` the distance a mobject moves per second is equal", "+ y * UP + z * OUT)) self.set_opacity(self.opacity) def", "arguments to be passed to the :class:`~.Vector` constructor \"\"\" output", "for details. Returns ------- Callable[[Mobject, float], Mobject] The update function.", "sequence of y_min, y_max, delta_y z_range A sequence of z_min,", "2: self.ranges[i] += [0.5] self.ranges[i][1] += self.ranges[i][2] self.x_range, self.y_range, self.z_range", "y_range, z_range): self.add(self.get_vector(x * RIGHT + y * UP +", "this functions is displayed by moving many agents along the", "the magnitude of the vector field. 
Parameters ---------- speed At", "ceil(config[\"frame_height\"] / 2), ] self.ranges = [self.x_range, self.y_range] if three_dimensions", "+= [0.5] self.ranges[i][1] += self.ranges[i][2] self.x_range, self.y_range, self.z_range = self.ranges", "1, time_width: float = 0.3, rate_func: Callable[[float], float] = linear,", "hide_and_wait, run_time=-line.time / self.flow_speed, ), create, ), ) self.remove(line.anim.mobject) line.anim.finish()", "= np.concatenate( (result, np.full([len(result), 1], opacity)), axis=1, ) return result", "else: mob.shift(runge_kutta(self, mob.get_center(), step_size)) return self def nudge_submobjects( self, dt:", "value gives the position in the color gradient defined using", "a function defining the :class:`~.Vector` at every position. The values", "self.remove_updater(self.flow_animation) self.flow_animation = None for line in self.stream_lines: create =", "or p[2] > self.z_range[1] + self.padding - self.z_range[2] ) max_steps", "single step. Returns ------- float How much the point is", "max_color_scheme_value, ) alpha = inverse_interpolate( min_color_scheme_value, max_color_scheme_value, color_value, ) alpha", "self.z_range[0] - self.padding or p[2] > self.z_range[1] + self.padding -", "is stretched. Lower values result in a better approximation of", "color in `colors`. Lower values also result in the first", "vector field to all submobjects. Parameters ---------- dt A scalar", "+ pos[1] * LEFT) - pos stream_lines = StreamLines( func,", "rate_func=rate_func, time_width=time_width, **kwargs, ) line.anim.begin() line.time = random.random() * self.virtual_time", "lambda pos: np.sin(pos[0]) * UR + np.cos(pos[1]) * LEFT +", "self, warm_up=True, flow_speed: float = 1, time_width: float = 0.3,", "distance a mobject moves per second is equal to the", "before being terminated. stroke_width The stroke with of the stream", "opacity The opacity of the stream lines. 
Examples -------- ..", "UP vector_field = ArrowVectorField(func) self.add(vector_field) self.wait() func = VectorField.scale_func(func, 0.5)", "= self.virtual_time if lag_ratio is None: lag_ratio = run_time /", "is calculated so that the creation animation starts at the", "def start_submobject_movement( self, speed: float = 1, pointwise: bool =", "# Determining how lines are drawn dt=0.05, virtual_time=3, max_anchors_per_line=100, padding=3,", "as numpy arrays representing rgba values \"\"\" rgbs = np.array([color_to_rgb(c)", "manim:: ScaleVectorFieldFunction class ScaleVectorFieldFunction(Scene): def construct(self): func = lambda pos:", "to all submobjects. Parameters ---------- dt A scalar to the", "the mobject along the vector field. See :meth:`nudge` for details.", "speed is determined by the magnitude of the vector field.", "color=colors[0]).shift(LEFT * 5) max_radius = Circle(radius=10, color=colors[-1]).shift(LEFT * 5) vf", "method. Parameters ---------- speed The speed at which to move", "in longer stream lines. However, this whole time gets simulated", "the given point. Color and length fit the specifications of", "function defining the :class:`~.Vector` at every position. The values of", "reduced in complexity, not in length. padding The distance agents", "Parameters ---------- start start value used for inverse interpolation at", "-> float: \"\"\"Returns the change in position of a point", "before fading it out.\") def hide_and_wait(mob, alpha): if alpha ==", "n_repeats self.virtual_time = virtual_time self.max_anchors_per_line = max_anchors_per_line self.padding = padding", "= DEFAULT_SCALAR_FIELD_COLORS, **kwargs ): super().__init__(**kwargs) self.func = func if color", "* LEFT + pos / 5 stream_lines = StreamLines( func,", "x_max, delta_x y_range A sequence of y_min, y_max, delta_y z_range", "shown. 
flow_speed At `flow_speed=1` the distance the flow moves per", "= 1, pointwise: bool = False, ) -> \"VectorField\": \"\"\"Start", "x / 2 ) self.add(vector_field) circle = Circle(radius=2).shift(LEFT) self.add(circle.copy().set_color(GRAY)) dot", "line.anim.finish() max_run_time = self.virtual_time / self.flow_speed creation_rate_func = ease_out_sine creation_staring_speed", "func(values, opacity=1): alphas = inverse_interpolate(start, end, np.array(values)) alphas = np.clip(alphas,", "second is equal to the magnitude of the vector field", "the function of the vector field and is rooted in", "used to cap the displayed size of vectors to reduce", "line.anim = line_animation_class( line, run_time=run_time, rate_func=rate_func, time_width=time_width, **kwargs, ) line.anim.begin()", "* creation_staring_speed ) # creation_run_time is calculated so that the", "get_vectorized_rgba_gradient_function( self, start: float, end: float, colors: Iterable, ): \"\"\"", "), create, ), ) self.remove(line.anim.mobject) line.anim.finish() else: remaining_time = max_run_time", "AnimationGroup(*animations, lag_ratio=lag_ratio) def start_animation( self, warm_up=True, flow_speed: float = 1,", "1) scaled_alphas = alphas * (len(rgbs) - 1) indices =", "self.x_range = x_range or [ floor(-config[\"frame_width\"] / 2), ceil(config[\"frame_width\"] /", "self.z_range = self.ranges super().__init__( func, color, color_scheme, min_color_scheme_value, max_color_scheme_value, colors,", "return ( p[0] < self.x_range[0] - self.padding or p[0] >", "points_array[:, :, 0] = x_array points_array[:, :, 1] = y_array", "= config[\"frame_height\"] points_array = np.zeros((ph, pw, 3)) x_array = np.linspace(-fw", "vector field. shift_vector The shift to be applied to the", "------- VectorField This vector field. \"\"\" self.stop_submobject_movement() self.submob_movement_updater = lambda", "the vector field. 
Returns ------- `Callable[[np.ndarray], np.ndarray]` The shifted vector", "length_func self.opacity = opacity if vector_config is None: vector_config =", "config from ..animation.composition import AnimationGroup, Succession from ..animation.creation import Create", "the :class:`~.Mobject`, potentially distorting it. Returns ------- VectorField This vector", "animations.append( Succession( UpdateFromAlphaFunc( line, finish_updater_cycle, run_time=remaining_time, ), create, ), )", "flash animation but eases out. dt = 1 / config[\"frame_rate\"]", "move out of the generation area before being terminated. stroke_width", "circle = Circle(radius=2).shift(LEFT) self.add(circle.copy().set_color(GRAY)) dot = Dot().move_to(circle) vector_field.nudge(circle, -2, 60,", "field dt A scalar to the amount the mobject is", "line = VMobject() line.duration = step * dt step =", "be used as display size for the vector. By default", "\"\"\" for line in self.stream_lines: run_time = line.duration / flow_speed", "np.array( [np.linalg.norm(self.func(point)) for point in line.points], ) line.set_rgba_array_direct( self.values_to_rgbas(norms, opacity),", "The update function. \"\"\" return lambda mob, dt: self.nudge(mob, dt", "..constants import OUT, RIGHT, UP from ..mobject.geometry import Vector from", "func: Callable[[np.ndarray], np.ndarray], color: Optional[Color] = None, color_scheme: Optional[Callable[[np.ndarray], float]]", "position of each point being moved along the vector field.", "self.padding or p[1] > self.y_range[1] + self.padding - self.y_range[2] or", "displayed stream lines without a noticeable cut. Returns ------- :class:`~.AnimationGroup`", "point: np.ndarray): \"\"\"Creates a vector in the vector field. The", "vector field function. 
\"\"\" return lambda p: func(p * scalar)", "rgbs = np.apply_along_axis(self.pos_to_rgb, 2, points_array) return Image.fromarray((rgbs * 255).astype(\"uint8\")) def", "LEFT + pos / 5 stream_lines = StreamLines( func, x_range=[-3,", "StreamLines( func, x_range=[-3, 3, 0.2], y_range=[-2, 2, 0.2], padding=1 )", "color_scheme A function mapping a vector to a single value.", "k_2 = self.func(p + step_size * (k_1 * 0.5)) k_3", "end: float, colors: Iterable, ): \"\"\" Generates a gradient of", "start start value used for inverse interpolation at :func:`~.inverse_interpolate` end", "is altered along each axis. Defaults to :code:`delta_y / 2`", "generate the gradient Returns ------- function to generate the gradients", "* LEFT) - pos) / 3 self.add(StreamLines(func)) .. manim:: SpawningAndFlowingArea", "------- Callable[[Mobject, float], Mobject] The update function. \"\"\" return lambda", "p[0] > self.x_range[1] + self.padding - self.x_range[2] or p[1] <", "color=YELLOW, x_range=[-7, 7, 1], y_range=[-4, 4, 1], stroke_width=3, virtual_time=1, #", "stream line shown while being animated rate_func The rate function", "pos: ((pos[0] * UR + pos[1] * LEFT) - pos)", "clutter. opacity The opacity of the arrows. vector_config Additional arguments", "AnimationGroup: \"\"\"The creation animation of the stream lines. The stream" ]
[ "marshmallow schema as the 'Schema' attribute in a dataclass. It", "specified order = True in the Meta class OrderedDict([('height', None),", "= citySchema.dump(city) >>> city_json['best_building'] # We get an OrderedDict because", "dataclass field is used as arguments to the marshmallow Field.", "to change the class used for generic types below type_mapping", "type), you can pass it as the ``marshmallow_field`` key in", "Email = NewType('Email', str, field=marshmallow.fields.Email) >>> @dataclass ... class ContactInfo:", ">>> @dataclasses.dataclass() ... class C: ... important: int = dataclasses.field(init=True,", "unsafe_hash=unsafe_hash, frozen=frozen ) if _cls is None: return lambda cls:", "default) else: metadata.setdefault(\"required\", True) # If the field was already", "class_schema(Building) # Returns a marshmallow schema class (not an instance)", "# noinspection PyDataclass fields: Tuple[dataclasses.Field, ...] = dataclasses.fields(clazz) except TypeError:", "default=0) ... >>> c = class_schema(C)().load({ ... \"important\": 9, #", "\"\"\" # dataclass's typing doesn't expect it to be called", "\"\"\" This decorator adds a marshmallow schema as the 'Schema'", "None else dict(metadata) if default is not marshmallow.missing: metadata.setdefault(\"default\", default)", "one. >>> @dataclasses.dataclass ... class Anything: ... name: str ...", "import marshmallow import typing_inspect __all__ = [\"dataclass\", \"add_schema\", \"class_schema\", \"field_for_schema\",", "for generic types below type_mapping = base_schema.TYPE_MAPPING if base_schema else", "default value >>> _get_field_default(dataclasses.field()) <marshmallow.missing> \"\"\" # Remove `type: ignore`", "with an # underscore. The presence of _cls is used", "if typ is Any: metadata.setdefault(\"allow_none\", True) return marshmallow.fields.Raw(**metadata) # Generic", "imported ... 
\"unimportant\": 9 # This field will NOT be", "= None ) -> Type[marshmallow.Schema]: \"\"\" Convert a class to", "field constructor :param base_schema: marshmallow schema used as a base", "be imported ... }, unknown=marshmallow.EXCLUDE) >>> c C(important=9, unimportant=0) >>>", "= [] for meta_dict in (typ_args, metadata): if \"validate\" in", "... @marshmallow.validates_schema ... def validate(self, data, **_): ... raise marshmallow.ValidationError('never", "# A parameter to pass to marshmallow's field }) website:str", "of a field. If you want to use a custom", "dictionary of a field. If you want to use a", "order ... class Point: ... x:float ... y:float ... Schema:", "schema for it. Simple example:: from marshmallow import Schema from", "types below type_mapping = base_schema.TYPE_MAPPING if base_schema else {} if", "dict_type( keys=field_for_schema(arguments[0], base_schema=base_schema), values=field_for_schema(arguments[1], base_schema=base_schema), **metadata, ) elif typing_inspect.is_optional_type(typ): subtyp", "return field_for_schema(subtyp, metadata=metadata, base_schema=base_schema) elif typing_inspect.is_union_type(typ): from . import union_field", "def on_bind_field(self, field_name, field_obj): ... field_obj.data_key = (field_obj.data_key or field_name).upper()", "a `.Schema` attribute to the class object :param base_schema: marshmallow", "-> Optional[Type[marshmallow.fields.Field]]: return ( base_schema and base_schema.TYPE_MAPPING.get(typ) ) or marshmallow.Schema.TYPE_MAPPING.get(typ)", "Simple example:: from marshmallow import Schema from marshmallow_dataclass import dataclass", "field constructor. >>> import marshmallow.validate >>> IPv4 = NewType('IPv4', str,", "base class when deriving dataclass schema >>> int_field = field_for_schema(int,", "This field will be imported ... \"unimportant\": 9 # This", "for subtyp in arguments ], **metadata, ) # typing.NewType returns", "the given python type. 
The metadata of the dataclass field", "many = self.many if many is None else bool(many) if", "<marshmallow.missing> \"\"\" # Remove `type: ignore` when https://github.com/python/mypy/issues/6910 is fixed", "typing import ( Any, Callable, Dict, List, Mapping, Optional, Set,", "= False, unsafe_hash: bool = False, frozen: bool = False,", "= { \"marshmallow_field\": marshmallow.fields.Url() # Custom marshmallow field }) Schema:", "when the field is missing :param metadata: Additional parameters to", "in the __init__ method will be added: ... unimportant: int", "name: str = dataclasses.field(default=\"anonymous\") ... class Meta: ... ordered =", "MyIps(ips=['127.0.0.1']) >>> Email = NewType('Email', str, field=marshmallow.fields.Email) >>> @dataclass ...", "dataclass members that will be copied to generated schema. MEMBERS_WHITELIST:", "in all_loaded] else: return clazz(**all_loaded) return BaseSchema def _get_field_default(field: dataclasses.Field):", "clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None ) -> Type[marshmallow.Schema]: try:", "if this # decorator is being called with parameters or", "must not be set for required fields. if not metadata.get(\"required\"):", "It adds a `.Schema` attribute to the class object :param", "from marshmallow import Schema from dataclasses import field from marshmallow_dataclass", "conversion of python 3.7's :mod:`dataclasses` to :mod:`marshmallow` schemas. It takes", "marshmallow Field. :param typ: The type for which a field", "], **metadata, ) # typing.NewType returns a function with a", "on_bind_field(self, field_name, field_obj): ... field_obj.data_key = (field_obj.data_key or field_name).upper() >>>", "... >>> citySchema = class_schema(City)() >>> city = citySchema.load({\"name\":\"Paris\", \"best_building\":", "def add_schema( base_schema: Type[marshmallow.Schema] = None, ) -> Callable[[Type[_U]], Type[_U]]:", "python 3.7's :mod:`dataclasses` to :mod:`marshmallow` schemas. 
It takes a python", "= dataclasses.dataclass( # type: ignore _cls, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash,", "metadata): if \"validate\" in meta_dict: if marshmallow.utils.is_iterable_but_not_string(meta_dict[\"validate\"]): new_validators.extend(meta_dict[\"validate\"]) elif callable(meta_dict[\"validate\"]):", "many is None else bool(many) if many: return [clazz(**loaded) for", "\"\"\"NewType creates simple unique types to which you can attach", "citySchema.dump(city) >>> city_json['best_building'] # We get an OrderedDict because we", "add_schema(_cls: Type[_U]) -> Type[_U]: ... @overload def add_schema( base_schema: Type[marshmallow.Schema]", "field: return field(**metadata) if typ is Any: metadata.setdefault(\"allow_none\", True) return", "Anything: ... name: str ... @marshmallow.validates('name') ... def validates(self, value):", "the marshmallow field constructor. >>> import marshmallow.validate >>> IPv4 =", "citySchema.load({\"name\":\"Paris\"}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'best_building': ['Missing", "tuple_type(children, **metadata) elif origin in (dict, Dict): dict_type = type_mapping.get(Dict,", "a None default metadata[\"default\"] = metadata.get(\"default\", None) metadata[\"missing\"] = metadata.get(\"missing\",", "'marshmallow.fields.Url'> \"\"\" metadata = {} if metadata is None else", "y:float point = Point(x=0, y=0) point_json = Point.Schema().dumps(point) Full example::", "for it too. ... other_buildings: List[Building] = dataclasses.field(default_factory=lambda: []) ...", "Point.Schema().dumps(point) Full example:: from marshmallow import Schema from dataclasses import", "base_schema: Optional[Type[marshmallow.Schema]] = None ) -> Type[marshmallow.Schema]: \"\"\" Base schema", "{'url': ['Not a valid URL.']} >>> @dataclasses.dataclass ... 
class NeverValid:", "can be passed in the `metadata` dictionary of a field.", "to marshmallow's field }) website:str = field(metadata = { \"marshmallow_field\":", ">>> class BaseSchema(marshmallow.Schema): ... def on_bind_field(self, field_name, field_obj): ... field_obj.data_key", "constructor. >>> import marshmallow.validate >>> IPv4 = NewType('IPv4', str, validate=marshmallow.validate.Regexp(r'^([0-9]{1,3}\\\\.){3}[0-9]{1,3}$'))", "load(self, data: Mapping, *, many: bool = None, **kwargs): all_loaded", "self.many if many is None else bool(many) if many: return", "type, base_schema: Optional[Type[marshmallow.Schema]] = None ) -> Type[marshmallow.Schema]: \"\"\" Convert", "_get_field_default(field: dataclasses.Field): \"\"\" Return a marshmallow default value given a", "= None, base_schema: Optional[Type[marshmallow.Schema]] = None, ) -> marshmallow.fields.Field: \"\"\"", "whitelisted members of the dataclass to the schema. attributes =", "ips: List[IPv4] >>> MyIps.Schema().load({\"ips\": [\"0.0.0.0\", \"grumble grumble\"]}) Traceback (most recent", "base_schema) @overload def add_schema(_cls: Type[_U]) -> Type[_U]: ... 
@overload def", "Type[_U]: clazz.Schema = class_schema(clazz, base_schema) # type: ignore return clazz", "children = tuple( field_for_schema(arg, base_schema=base_schema) for arg in arguments )", "Optional[Type[marshmallow.fields.Field]]: return ( base_schema and base_schema.TYPE_MAPPING.get(typ) ) or marshmallow.Schema.TYPE_MAPPING.get(typ) def", "arguments = typing_inspect.get_args(typ, True) # Override base_schema.TYPE_MAPPING to change the", "type: ignore def load(self, data: Mapping, *, many: bool =", ">>> city_json['best_building'] # We get an OrderedDict because we specified", "def add_schema( _cls: Type[_U], base_schema: Type[marshmallow.Schema] = None ) ->", ") # typing.NewType returns a function with a __supertype__ attribute", "forward_reference or class_schema(typ, base_schema=base_schema) ) return marshmallow.fields.Nested(nested, **metadata) def _base_schema(", "type checker \"\"\" import inspect from enum import EnumMeta from", "... name: str ... @marshmallow.validates('name') ... def validates(self, value): ...", "[\"dataclass\", \"add_schema\", \"class_schema\", \"field_for_schema\", \"NewType\"] NoneType = type(None) _U =", "in the Meta class OrderedDict([('height', None), ('name', 'Eiffel Tower')]) >>>", "Mapping, *, many: bool = None, **kwargs): all_loaded = super().load(data,", ") return marshmallow.fields.Nested(nested, **metadata) def _base_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]]", "-> Callable[[_U], _U]: \"\"\"NewType creates simple unique types to which", "{1: ['String does not match expected pattern.']}} >>> MyIps.Schema().load({\"ips\": [\"127.0.0.1\"]})", "last): ... marshmallow.exceptions.ValidationError: {'mail': ['Not a valid email address.']} \"\"\"", "deriving dataclass schema >>> int_field = field_for_schema(int, default=9, metadata=dict(required=True)) >>>", "arguments if typ is list: typ = List[Any] elif typ", "dataclass and cannot be turned into one. 
>>> @dataclasses.dataclass ...", "type, base_schema: Optional[Type[marshmallow.Schema]] = None ) -> Type[marshmallow.Schema]: \"\"\" Base", "a marshmallow Field corresponding to the given python type. The", "name: str = dataclasses.field(default=\"Anonymous\") ... friends: List['Person'] = dataclasses.field(default_factory=lambda:[]) #", "def _field_by_supertype( typ: Type, default: marshmallow.missing, newtype_supertype: Type, metadata: dict,", "Mapping, Optional, Set, Tuple, Type, TypeVar, Union, cast, overload, )", "typing.NewType('Meters', float) >>> @dataclasses.dataclass() ... class Building: ... height: Optional[Meters]", "one.\" ) # Copy all marshmallow hooks and whitelisted members", ":param metadata: Additional parameters to pass to the marshmallow field", "city = citySchema.load({\"name\":\"Paris\", \"best_building\": {\"name\": \"Eiffel Tower\"}}) >>> city City(name='Paris',", "want to use a custom marshmallow field (one that has", "metadata = {} if metadata is None else dict(metadata) if", "This decorator does the same as dataclasses.dataclass, but also applies", "as a base class when deriving dataclass schema >>> int_field", "use for (de)serialization when the field is missing :param metadata:", "type(None) _U = TypeVar(\"_U\") # Whitelist of dataclass members that", "if t is not NoneType) # type: ignore # Treat", "for which a field should be generated :param default: value", "recent call last): ... marshmallow.exceptions.ValidationError: {'_schema': ['never valid']} >>> #", "= field_for_schema(arguments[0], base_schema=base_schema) list_type = type_mapping.get(List, marshmallow.fields.List) return list_type(child_type, **metadata)", "... 
@overload def add_schema( base_schema: Type[marshmallow.Schema] = None, ) ->", "}) Schema: ClassVar[Type[Schema]] = Schema # For the type checker", ">>> citySchema = class_schema(City)() >>> city = citySchema.load({\"name\":\"Paris\", \"best_building\": {\"name\":", "from NewType) \"\"\" # Add the information coming our custom", "Optional[Type[marshmallow.Schema]] = None, ) -> marshmallow.fields.Field: \"\"\" Get a marshmallow", "If the field was already defined by the user predefined_field", "= base_schema.TYPE_MAPPING if base_schema else {} if origin in (list,", "typ is Any: metadata.setdefault(\"allow_none\", True) return marshmallow.fields.Raw(**metadata) # Generic types", "\"important\": 9, # This field will be imported ... \"unimportant\":", "attributes. All the keyword arguments passed to this function will", "int_field.__class__ <class 'marshmallow.fields.Integer'> >>> int_field.default 9 >>> field_for_schema(str, metadata={\"marshmallow_field\": marshmallow.fields.Url()}).__class__", "= type_mapping.get(Tuple, marshmallow.fields.Tuple) return tuple_type(children, **metadata) elif origin in (dict,", "schema factory that creates a schema for `clazz` derived either", "used to detect if this # decorator is being called", "`.Schema` attribute to the class object :param base_schema: marshmallow schema", "marshmallow's field }) website:str = field(metadata = { \"marshmallow_field\": marshmallow.fields.Url()", "bool = False, unsafe_hash: bool = False, frozen: bool =", "Type, metadata: dict, base_schema: Optional[Type[marshmallow.Schema]], ) -> marshmallow.fields.Field: \"\"\" Return", "{ \"marshmallow_field\": marshmallow.fields.Url() # Custom marshmallow field }) Schema: ClassVar[Type[Schema]]", "ignore _cls, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen ) if _cls", "**metadata) def _base_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None )", "other_buildings=[]) >>> citySchema.load({\"name\":\"Paris\"}) Traceback 
(most recent call last): ... marshmallow.exceptions.ValidationError:", "@dataclasses.dataclass() ... class City: ... name: str = dataclasses.field(metadata={'required':True}) ...", "}) website:str = field(metadata = { \"marshmallow_field\": marshmallow.fields.Url() # Custom", "`metadata`. # See https://github.com/lovasoa/marshmallow_dataclass/issues/91 new_validators: List[Callable] = [] for meta_dict", "class to a marshmallow schema :param clazz: A python class", "metadata[\"required\"] = False return field_for_schema(subtyp, metadata=metadata, base_schema=base_schema) elif typing_inspect.is_union_type(typ): from", "is None: return lambda cls: add_schema(dc(cls), base_schema) return add_schema(dc, base_schema)", "\"Eiffel Tower\"}}) >>> city City(name='Paris', best_building=Building(height=None, name='Eiffel Tower'), other_buildings=[]) >>>", "class_schema(typ, base_schema=base_schema) ) return marshmallow.fields.Nested(nested, **metadata) def _base_schema( clazz: type,", "c = class_schema(C)().load({ ... \"important\": 9, # This field will", "both `typ` and `metadata`. # See https://github.com/lovasoa/marshmallow_dataclass/issues/91 new_validators: List[Callable] =", "= dataclasses.field(init=False, default=0) ... >>> c = class_schema(C)().load({ ... \"important\":", "typ is dict: typ = Dict[Any, Any] # Base types", "new_validators.append(meta_dict[\"validate\"]) metadata[\"validate\"] = new_validators if new_validators else None metadata =", "address.']} \"\"\" def new_type(x: _U): return x new_type.__name__ = name", "line can be statically type checked Point(x=0.0, y=0.0) \"\"\" #", "_cls is None: return lambda cls: add_schema(dc(cls), base_schema) return add_schema(dc,", "repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen ) if _cls is None:", "= Point(x=0, y=0) point_json = Point.Schema().dumps(point) Full example:: from marshmallow", "marshmallow Schema corresponding to the dataclass .. 
note:: All the", "= (field_obj.data_key or field_name).upper() >>> @add_schema(base_schema=BaseSchema) ... @dataclasses.dataclass ... class", "import Schema from marshmallow_dataclass import dataclass @dataclass class Point: x:float", "a valid URL.']} >>> @dataclasses.dataclass ... class NeverValid: ... @marshmallow.validates_schema", "`BaseSchema` \"\"\" # Remove `type: ignore` when mypy handles dynamic", "class when deriving dataclass schema >>> class BaseSchema(marshmallow.Schema): ... def", ">>> _get_field_default(dataclasses.field()) <marshmallow.missing> \"\"\" # Remove `type: ignore` when https://github.com/python/mypy/issues/6910", "frozen=frozen ) if _cls is None: return lambda cls: add_schema(dc(cls),", "the __init__ method will be added: ... unimportant: int =", "marshmallow.fields.Dict) return dict_type( keys=field_for_schema(arguments[0], base_schema=base_schema), values=field_for_schema(arguments[1], base_schema=base_schema), **metadata, ) elif", "field order ... class Point: ... x:float ... y:float ...", "= None, **kwargs): all_loaded = super().load(data, many=many, **kwargs) many =", "type_mapping.get(List, marshmallow.fields.List) return list_type(child_type, **metadata) if origin in (tuple, Tuple):", "= class_schema(Person)().load({ ... \"friends\": [{\"name\": \"<NAME>\"}] ... }) >>> person", "dataclass try: return class_schema(dataclasses.dataclass(clazz), base_schema) except Exception: raise TypeError( f\"{getattr(clazz,", "# Whitelist of dataclass members that will be copied to", "} # Update the schema members to contain marshmallow fields", "field_for_schema( field.type, _get_field_default(field), field.metadata, base_schema ), ) for field in", "... class Person: ... name: str = dataclasses.field(default=\"Anonymous\") ... friends:", "}, unknown=marshmallow.EXCLUDE) >>> c C(important=9, unimportant=0) >>> @dataclasses.dataclass ... 
class", "**kwargs) many = self.many if many is None else bool(many)", "hooks and whitelisted members of the dataclass to the schema.", "elif origin in (dict, Dict): dict_type = type_mapping.get(Dict, marshmallow.fields.Dict) return", "typ.__name__, **typ_args, **metadata} field = getattr(typ, \"_marshmallow_field\", None) if field:", "best_building: Building # Reference to another dataclasses. A schema will", "type arguments if typ is list: typ = List[Any] elif", "keys=field_for_schema(arguments[0], base_schema=base_schema), values=field_for_schema(arguments[1], base_schema=base_schema), **metadata, ) elif typing_inspect.is_optional_type(typ): subtyp =", "OrderedDict because we specified order = True in the Meta", "too long\") >>> class_schema(Anything)().load({\"name\": \"aaaaaargh\"}) Traceback (most recent call last):", ") elif typing_inspect.is_optional_type(typ): subtyp = next(t for t in arguments", "marshmallow schema for it. Simple example:: from marshmallow import Schema", "schema_class = type(clazz.__name__, (_base_schema(clazz, base_schema),), attributes) return cast(Type[marshmallow.Schema], schema_class) def", "dataclasses.MISSING: return marshmallow.missing return field.default def NewType( name: str, typ:", "the 'Schema' attribute in a dataclass. It uses :func:`class_schema` internally.", "= Artist.Schema().loads('{\"NAMES\": [\"Martin\", \"Ramirez\"]}') >>> artist Artist(names=('Martin', 'Ramirez')) \"\"\" def", "_field_by_type( typ: Union[type, Any], base_schema: Optional[Type[marshmallow.Schema]] ) -> Optional[Type[marshmallow.fields.Field]]: return", "class when deriving dataclass schema >>> @dataclass ... 
class Artist:", "base_schema=base_schema) for arg in arguments ) tuple_type = type_mapping.get(Tuple, marshmallow.fields.Tuple)", "# enumerations if isinstance(typ, EnumMeta): import marshmallow_enum return marshmallow_enum.EnumField(typ, **metadata)", "from marshmallow import Schema from marshmallow_dataclass import dataclass @dataclass class", "that are in the __init__ method will be added: ...", "Type[_U]) -> Type[_U]: ... @overload def add_schema( base_schema: Type[marshmallow.Schema] =", "deriving dataclass schema >>> class BaseSchema(marshmallow.Schema): ... def on_bind_field(self, field_name,", "cls: The dataclass to which a Schema should be added", "copied to generated schema. MEMBERS_WHITELIST: Set[str] = {\"Meta\"} # Max", "to generated schema. MEMBERS_WHITELIST: Set[str] = {\"Meta\"} # Max number", ">>> class_schema(NeverValid)().load({}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'_schema':", "import typing_inspect __all__ = [\"dataclass\", \"add_schema\", \"class_schema\", \"field_for_schema\", \"NewType\"] NoneType", "'Eiffel Tower')]) >>> @dataclasses.dataclass() ... class Person: ... 
name: str", "return add_schema(dc, base_schema) @overload def add_schema(_cls: Type[_U]) -> Type[_U]: ...", "is not dataclasses.MISSING: return default_factory elif field.default is dataclasses.MISSING: return", "# _cls should never be specified by keyword, so start", "field: Optional[Type[marshmallow.fields.Field]] = None, **kwargs, ) -> Callable[[_U], _U]: \"\"\"NewType", "marshmallow.exceptions.ValidationError: {'name': ['Name too long']} \"\"\" return _proxied_class_schema(clazz, base_schema) @lru_cache(maxsize=MAX_CLASS_SCHEMA_CACHE_SIZE)", "field (one that has no equivalent python type), you can", "values=field_for_schema(arguments[1], base_schema=base_schema), **metadata, ) elif typing_inspect.is_optional_type(typ): subtyp = next(t for", "# We get an OrderedDict because we specified order =", "dataclass schema >>> @dataclass ... class Artist: ... name: str", "added: ... unimportant: int = dataclasses.field(init=False, default=0) ... >>> c", "(de)serialization when the field is missing :param metadata: Additional parameters", "getattr(typ, \"_marshmallow_args\", {}) # Handle multiple validators from both `typ`", "Generic types specified without type arguments if typ is list:", "validate(self, data, **_): ... raise marshmallow.ValidationError('never valid') ... >>> class_schema(NeverValid)().load({})", ") -> Optional[Type[marshmallow.fields.Field]]: return ( base_schema and base_schema.TYPE_MAPPING.get(typ) ) or", "add_schema( _cls: Type[_U], base_schema: Type[marshmallow.Schema] = None ) -> Type[_U]:", "k, v in inspect.getmembers(clazz) if hasattr(v, \"__marshmallow_hook__\") or k in", "getattr(typ, \"__forward_arg__\", None) nested = ( nested_schema or forward_reference or", "dataclass nested_schema = getattr(typ, \"Schema\", None) # Nested dataclasses forward_reference", "recent call last): ... 
marshmallow.exceptions.ValidationError: {'mail': ['Not a valid email", "schema_class) def _field_by_type( typ: Union[type, Any], base_schema: Optional[Type[marshmallow.Schema]] ) ->", "default=marshmallow.missing, metadata: Mapping[str, Any] = None, base_schema: Optional[Type[marshmallow.Schema]] = None,", "(not an instance) <class 'marshmallow.schema.Building'> >>> @dataclasses.dataclass() ... class City:", "last): ... marshmallow.exceptions.ValidationError: {'_schema': ['never valid']} >>> # noinspection PyTypeChecker", "for (de)serialization when the field is missing :param metadata: Additional", "function, so ignore type check dc = dataclasses.dataclass( # type:", "Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'mail': ['Not a", ":return: A marshmallow Schema corresponding to the dataclass .. note::", "list_type = type_mapping.get(List, marshmallow.fields.List) return list_type(child_type, **metadata) if origin in", "schemas. It takes a python class, and generates a marshmallow", "(most recent call last): ... marshmallow.exceptions.ValidationError: {'best_building': ['Missing data for", "Exception: raise TypeError( f\"{getattr(clazz, '__name__', repr(clazz))} is not a dataclass", "marshmallow hooks and whitelisted members of the dataclass to the", "= type_mapping.get(Dict, marshmallow.fields.Dict) return dict_type( keys=field_for_schema(arguments[0], base_schema=base_schema), values=field_for_schema(arguments[1], base_schema=base_schema), **metadata,", "change the class used for generic types below type_mapping =", "dataclasses.field(default=\"Anonymous\") ... 
friends: List['Person'] = dataclasses.field(default_factory=lambda:[]) # Recursive field ...", "to contain marshmallow fields instead of dataclass fields attributes.update( (", "base_schema=base_schema) ) return marshmallow.fields.Nested(nested, **metadata) def _base_schema( clazz: type, base_schema:", "use a custom marshmallow field (one that has no equivalent", "We get an OrderedDict because we specified order = True", "return class_schema(dataclasses.dataclass(clazz), base_schema) except Exception: raise TypeError( f\"{getattr(clazz, '__name__', repr(clazz))}", "Returns a marshmallow schema class (not an instance) <class 'marshmallow.schema.Building'>", "person = class_schema(Person)().load({ ... \"friends\": [{\"name\": \"<NAME>\"}] ... }) >>>", "Traceback (most recent call last): ... TypeError: None is not", "= { k: v for k, v in inspect.getmembers(clazz) if", ":param default: value to use for (de)serialization when the field", "statically type checked Point(x=0.0, y=0.0) \"\"\" # dataclass's typing doesn't", "the type checker ... >>> Point.Schema().load({'x':0, 'y':0}) # This line", "else bool(many) if many: return [clazz(**loaded) for loaded in all_loaded]", "ignore return new_type if __name__ == \"__main__\": import doctest doctest.testmod(verbose=True)", "Type, TypeVar, Union, cast, overload, ) import dataclasses import marshmallow", "\"\"\" Return a new field for fields based on a", "\"\"\" def new_type(x: _U): return x new_type.__name__ = name new_type.__supertype__", "... marshmallow.exceptions.ValidationError: {'best_building': ['Missing data for required field.']} >>> city_json", "**typ_args, **metadata} field = getattr(typ, \"_marshmallow_field\", None) if field: return", "ignore` when https://github.com/python/mypy/issues/6910 is fixed default_factory = field.default_factory # type:", "typing >>> Meters = typing.NewType('Meters', float) >>> @dataclasses.dataclass() ... 
class", "in arguments ], **metadata, ) # typing.NewType returns a function", "= TypeVar(\"_U\") # Whitelist of dataclass members that will be", "field is used as arguments to the marshmallow Field. :param", "9 >>> field_for_schema(str, metadata={\"marshmallow_field\": marshmallow.fields.Url()}).__class__ <class 'marshmallow.fields.Url'> \"\"\" metadata =", "to the marshmallow field constructor :param base_schema: marshmallow schema used", "by marshmallow field classes can be passed in the `metadata`", "number of generated schemas that class_schema keeps of generated schemas.", "base_schema.TYPE_MAPPING to change the class used for generic types below", "origin = typing_inspect.get_origin(typ) if origin: arguments = typing_inspect.get_args(typ, True) #", "def _base_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None ) ->", "Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'ips': {1: ['String", "good URL !\"}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError:", "schema >>> int_field = field_for_schema(int, default=9, metadata=dict(required=True)) >>> int_field.__class__ <class", "name: str = dataclasses.field(metadata={'required':True}) ... best_building: Building # Reference to", "**_): ... raise marshmallow.ValidationError('never valid') ... 
>>> class_schema(NeverValid)().load({}) Traceback (most", "elif typ is dict: typ = Dict[Any, Any] # Base", "The presence of _cls is used to detect if this", "{'name': ['Name too long']} \"\"\" return _proxied_class_schema(clazz, base_schema) @lru_cache(maxsize=MAX_CLASS_SCHEMA_CACHE_SIZE) def", "loaded in all_loaded] else: return clazz(**all_loaded) return BaseSchema def _get_field_default(field:", "None: return lambda cls: add_schema(dc(cls), base_schema) return add_schema(dc, base_schema) @overload", "equivalent python type), you can pass it as the ``marshmallow_field``", "the dataclass field is used as arguments to the marshmallow", "clazz(**all_loaded) return BaseSchema def _get_field_default(field: dataclasses.Field): \"\"\" Return a marshmallow", "= dataclasses.field(default_factory=lambda: []) ... >>> citySchema = class_schema(City)() >>> city", "base class when deriving dataclass schema :return: A marshmallow Schema", "Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'name': ['Name too", "``marshmallow_field`` key in the metadata dictionary. >>> import typing >>>", "getattr(typ, \"_marshmallow_field\", None) if field: return field(**metadata) else: return field_for_schema(", "dataclass schema :return: A marshmallow Schema corresponding to the dataclass", "@dataclasses.dataclass ... class Artist: ... names: Tuple[str, str] >>> artist", "Tuple): children = tuple( field_for_schema(arg, base_schema=base_schema) for arg in arguments", "inspect from enum import EnumMeta from functools import lru_cache from", "class Artist: ... names: Tuple[str, str] >>> artist = Artist.Schema().loads('{\"NAMES\":", ">>> c C(important=9, unimportant=0) >>> @dataclasses.dataclass ... 
class Website: ...", "marshmallow.exceptions.ValidationError: {'best_building': ['Missing data for required field.']} >>> city_json =", "= field_for_schema(int, default=9, metadata=dict(required=True)) >>> int_field.__class__ <class 'marshmallow.fields.Integer'> >>> int_field.default", "typing_inspect.is_optional_type(typ): subtyp = next(t for t in arguments if t", "as types with a None default metadata[\"default\"] = metadata.get(\"default\", None)", "base_schema: Optional[Type[marshmallow.Schema]] = None ) -> Type[marshmallow.Schema]: \"\"\" Convert a", ">>> import typing >>> Meters = typing.NewType('Meters', float) >>> @dataclasses.dataclass()", "The metadata of the dataclass field is used as arguments", "# Nested marshmallow dataclass nested_schema = getattr(typ, \"Schema\", None) #", "dataclasses import field from marshmallow_dataclass import dataclass import datetime @dataclass", "is dataclasses.MISSING: return marshmallow.missing return field.default def NewType( name: str,", "= typing_inspect.get_args(typ, True) # Override base_schema.TYPE_MAPPING to change the class", "bool = False, frozen: bool = False, base_schema: Optional[Type[marshmallow.Schema]] =", "C(important=9, unimportant=0) >>> @dataclasses.dataclass ... class Website: ... url:str =", "default metadata[\"default\"] = metadata.get(\"default\", None) metadata[\"missing\"] = metadata.get(\"missing\", None) metadata[\"required\"]", ">>> @dataclass ... class Artist: ... name: str >>> Artist.Schema", "frozen: bool = False, base_schema: Optional[Type[marshmallow.Schema]] = None, ): \"\"\"", "= Schema # For the type checker ... >>> Point.Schema().load({'x':0,", "field_name).upper() >>> @add_schema(base_schema=BaseSchema) ... @dataclasses.dataclass ... class Artist: ... 
names:", "= metadata.get(\"default\", None) metadata[\"missing\"] = metadata.get(\"missing\", None) metadata[\"required\"] = False", "metadata = {\"description\": typ.__name__, **typ_args, **metadata} field = getattr(typ, \"_marshmallow_field\",", "created for it too. ... other_buildings: List[Building] = dataclasses.field(default_factory=lambda: [])", "attributes) return cast(Type[marshmallow.Schema], schema_class) def _field_by_type( typ: Union[type, Any], base_schema:", "return field(**metadata) else: return field_for_schema( newtype_supertype, metadata=metadata, default=default, base_schema=base_schema, )", "ClassVar[Type[Schema]] = Schema # For the type checker ... >>>", "# type: ignore new_type._marshmallow_field = field # type: ignore new_type._marshmallow_args", "def decorator(clazz: Type[_U]) -> Type[_U]: clazz.Schema = class_schema(clazz, base_schema) #", "field ... }) ... >>> class_schema(Website)().load({\"url\": \"I am not a", "marshmallow.exceptions.ValidationError: {'_schema': ['never valid']} >>> # noinspection PyTypeChecker >>> class_schema(None)", "class_schema(Website)().load({\"url\": \"I am not a good URL !\"}) Traceback (most", "... name: str = dataclasses.field(default=\"anonymous\") ... class Meta: ... ordered", "Union[type, Any], base_schema: Optional[Type[marshmallow.Schema]] ) -> Optional[Type[marshmallow.fields.Field]]: return ( base_schema", "all marshmallow hooks and whitelisted members of the dataclass to", ">>> IPv4 = NewType('IPv4', str, validate=marshmallow.validate.Regexp(r'^([0-9]{1,3}\\\\.){3}[0-9]{1,3}$')) >>> @dataclass ... class", "Website: ... url:str = dataclasses.field(metadata = { ... 
\"marshmallow_field\": marshmallow.fields.Url()", "field is missing :param metadata: Additional parameters to pass to", "NewType( name: str, typ: Type[_U], field: Optional[Type[marshmallow.fields.Field]] = None, **kwargs,", "= True in the Meta class OrderedDict([('height', None), ('name', 'Eiffel", "t is not NoneType) # type: ignore # Treat optional", "class Point: ... x:float ... y:float ... Schema: ClassVar[Type[Schema]] =", "\"grumble grumble\"}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'mail':", "... unimportant: int = dataclasses.field(init=False, default=0) ... >>> c =", "fields attributes.update( ( field.name, field_for_schema( field.type, _get_field_default(field), field.metadata, base_schema ),", "@add_schema(base_schema=BaseSchema) ... @dataclasses.dataclass ... class Artist: ... names: Tuple[str, str]", "City(name='Paris', best_building=Building(height=None, name='Eiffel Tower'), other_buildings=[]) >>> citySchema.load({\"name\":\"Paris\"}) Traceback (most recent", "A marshmallow Schema corresponding to the dataclass .. note:: All", "Tower'), other_buildings=[]) >>> citySchema.load({\"name\":\"Paris\"}) Traceback (most recent call last): ...", "type_mapping.get(Tuple, marshmallow.fields.Tuple) return tuple_type(children, **metadata) elif origin in (dict, Dict):", "str = dataclasses.field(default=\"anonymous\") ... class Meta: ... ordered = True", "dataclass schema >>> class BaseSchema(marshmallow.Schema): ... def on_bind_field(self, field_name, field_obj):", "schemas that class_schema keeps of generated schemas. Removes duplicates. MAX_CLASS_SCHEMA_CACHE_SIZE", "'missing' must not be set for required fields. if not", "is missing :param metadata: Additional parameters to pass to the", "predefined_field = metadata.get(\"marshmallow_field\") if predefined_field: return predefined_field # Generic types", "base_schema: Type[marshmallow.Schema] = None ) -> Type[_U]: ... 
def add_schema(_cls=None,", ") tuple_type = type_mapping.get(Tuple, marshmallow.fields.Tuple) return tuple_type(children, **metadata) elif origin", "meta_dict: if marshmallow.utils.is_iterable_but_not_string(meta_dict[\"validate\"]): new_validators.extend(meta_dict[\"validate\"]) elif callable(meta_dict[\"validate\"]): new_validators.append(meta_dict[\"validate\"]) metadata[\"validate\"] = new_validators", "= type(clazz.__name__, (_base_schema(clazz, base_schema),), attributes) return cast(Type[marshmallow.Schema], schema_class) def _field_by_type(", "Dict): dict_type = type_mapping.get(Dict, marshmallow.fields.Dict) return dict_type( keys=field_for_schema(arguments[0], base_schema=base_schema), values=field_for_schema(arguments[1],", "the class used for generic types below type_mapping = base_schema.TYPE_MAPPING", "typ: The type for which a field should be generated", "return marshmallow.fields.Raw(**metadata) # Generic types origin = typing_inspect.get_origin(typ) if origin:", "\"\"\" Convert a class to a marshmallow schema :param clazz:", "and `metadata`. # See https://github.com/lovasoa/marshmallow_dataclass/issues/91 new_validators: List[Callable] = [] for", "Get a marshmallow Field corresponding to the given python type.", "order = True in the Meta class OrderedDict([('height', None), ('name',", "type(clazz.__name__, (_base_schema(clazz, base_schema),), attributes) return cast(Type[marshmallow.Schema], schema_class) def _field_by_type( typ:", "validators from both `typ` and `metadata`. # See https://github.com/lovasoa/marshmallow_dataclass/issues/91 new_validators:", "MEMBERS_WHITELIST } # Update the schema members to contain marshmallow", "Building # Reference to another dataclasses. A schema will be", "user predefined_field = metadata.get(\"marshmallow_field\") if predefined_field: return predefined_field # Generic", "import datetime @dataclass class User: birth: datetime.date = field(metadata= {", "# This field will NOT be imported ... 
}, unknown=marshmallow.EXCLUDE)", "dataclasses.field(metadata = { ... \"marshmallow_field\": marshmallow.fields.Url() # Custom marshmallow field", ":param clazz: A python class (may be a dataclass) :param", "Add the information coming our custom NewType implementation typ_args =", "else dict(metadata) if default is not marshmallow.missing: metadata.setdefault(\"default\", default) #", "x new_type.__name__ = name new_type.__supertype__ = typ # type: ignore", "subtyp = next(t for t in arguments if t is", "point = Point(x=0, y=0) point_json = Point.Schema().dumps(point) Full example:: from", "inspect.isfunction(typ): return _field_by_supertype( typ=typ, default=default, newtype_supertype=newtype_supertype, metadata=metadata, base_schema=base_schema, ) #", "lru_cache from typing import ( Any, Callable, Dict, List, Mapping,", "dataclass) :param base_schema: marshmallow schema used as a base class", "the schema. attributes = { k: v for k, v", "checker \"\"\" import inspect from enum import EnumMeta from functools", "dataclass to the schema. attributes = { k: v for", "type_mapping = base_schema.TYPE_MAPPING if base_schema else {} if origin in", "order=order, unsafe_hash=unsafe_hash, frozen=frozen ) if _cls is None: return lambda", "Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'url': ['Not a", "of the dataclass to the schema. attributes = { k:", "import marshmallow.validate >>> IPv4 = NewType('IPv4', str, validate=marshmallow.validate.Regexp(r'^([0-9]{1,3}\\\\.){3}[0-9]{1,3}$')) >>> @dataclass", "this function will be transmitted to the marshmallow field constructor.", ">>> @dataclasses.dataclass() ... class City: ... name: str = dataclasses.field(metadata={'required':True})", "Only fields that are in the __init__ method will be", "= True ... 
>>> class_schema(Building) # Returns a marshmallow schema", "), ) for field in fields if field.init ) schema_class", ">>> Artist.Schema <class 'marshmallow.schema.Artist'> >>> from typing import ClassVar >>>", "typ = Dict[Any, Any] # Base types field = _field_by_type(typ,", "the ``marshmallow_field`` key in the metadata dictionary. >>> import typing", "class BaseSchema(base_schema or marshmallow.Schema): # type: ignore def load(self, data:", "\"add_schema\", \"class_schema\", \"field_for_schema\", \"NewType\"] NoneType = type(None) _U = TypeVar(\"_U\")", "the arguments supported by marshmallow field classes can be passed", "from dataclasses import field from marshmallow_dataclass import dataclass import datetime", "datetime @dataclass class User: birth: datetime.date = field(metadata= { \"required\":", "super field. (Usually spawned from NewType) \"\"\" # Add the", "# unsupported type Traceback (most recent call last): ... TypeError:", "newtype_supertype = getattr(typ, \"__supertype__\", None) if newtype_supertype and inspect.isfunction(typ): return", "recent call last): ... TypeError: None is not a dataclass", "NOT be imported ... }, unknown=marshmallow.EXCLUDE) >>> c C(important=9, unimportant=0)", "\"marshmallow_field\": marshmallow.fields.Url() # Custom marshmallow field }) Schema: ClassVar[Type[Schema]] =", "ignore` when mypy handles dynamic base classes # https://github.com/python/mypy/issues/2813 class", "\"_marshmallow_field\", None) if field: return field(**metadata) else: return field_for_schema( newtype_supertype,", "clazz return decorator(_cls) if _cls else decorator def class_schema( clazz:", "pass to the marshmallow field constructor :param base_schema: marshmallow schema", "= dataclasses.field(init=True, default=0) ... 
# Only fields that are in", "new_validators.extend(meta_dict[\"validate\"]) elif callable(meta_dict[\"validate\"]): new_validators.append(meta_dict[\"validate\"]) metadata[\"validate\"] = new_validators if new_validators else", "validates(self, value): ... if len(value) > 5: raise marshmallow.ValidationError(\"Name too", "metadata: dict, base_schema: Optional[Type[marshmallow.Schema]], ) -> marshmallow.fields.Field: \"\"\" Return a", "len(value) > 5: raise marshmallow.ValidationError(\"Name too long\") >>> class_schema(Anything)().load({\"name\": \"aaaaaargh\"})", "check dc = dataclasses.dataclass( # type: ignore _cls, repr=repr, eq=eq,", "= metadata.get(\"marshmallow_field\") if predefined_field: return predefined_field # Generic types specified", "str = dataclasses.field(metadata={'required':True}) ... best_building: Building # Reference to another", "getattr(typ, \"__supertype__\", None) if newtype_supertype and inspect.isfunction(typ): return _field_by_supertype( typ=typ,", "Tuple[dataclasses.Field, ...] = dataclasses.fields(clazz) except TypeError: # Not a dataclass", "= 1024 # _cls should never be specified by keyword,", "ClassVar[Type[Schema]] = Schema # For the type checker \"\"\" import", "# Only fields that are in the __init__ method will", "dataclass schema >>> int_field = field_for_schema(int, default=9, metadata=dict(required=True)) >>> int_field.__class__", "underscore. The presence of _cls is used to detect if", "class BaseSchema(marshmallow.Schema): ... def on_bind_field(self, field_name, field_obj): ... field_obj.data_key =", "type: ignore _cls, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen ) if", "typ: Type, default: marshmallow.missing, newtype_supertype: Type, metadata: dict, base_schema: Optional[Type[marshmallow.Schema]],", "parameter to pass to marshmallow's field }) website:str = field(metadata", "True ... 
>>> class_schema(Building) # Returns a marshmallow schema class", "if not metadata.get(\"required\"): metadata.setdefault(\"missing\", default) else: metadata.setdefault(\"required\", True) # If", "subtyp in arguments ], **metadata, ) # typing.NewType returns a", "marshmallow schema used as a base class when deriving dataclass", "if many: return [clazz(**loaded) for loaded in all_loaded] else: return", "arguments supported by marshmallow field classes can be passed in", "a marshmallow schema class (not an instance) <class 'marshmallow.schema.Building'> >>>", "\"Schema\", None) # Nested dataclasses forward_reference = getattr(typ, \"__forward_arg__\", None)", "(tuple, Tuple): children = tuple( field_for_schema(arg, base_schema=base_schema) for arg in", "else {} if origin in (list, List): child_type = field_for_schema(arguments[0],", "... }) >>> person Person(name='Anonymous', friends=[Person(name='<NAME>', friends=[])]) >>> @dataclasses.dataclass() ...", "Set[str] = {\"Meta\"} # Max number of generated schemas that", "to this function will be transmitted to the marshmallow field", "Artist: ... name: str >>> Artist.Schema <class 'marshmallow.schema.Artist'> >>> from", "... name: str >>> Artist.Schema <class 'marshmallow.schema.Artist'> >>> from typing", "base_schema) # type: ignore return clazz return decorator(_cls) if _cls", "... class Anything: ... name: str ... @marshmallow.validates('name') ... def", "base_schema.TYPE_MAPPING.get(typ) ) or marshmallow.Schema.TYPE_MAPPING.get(typ) def _field_by_supertype( typ: Type, default: marshmallow.missing,", "elif typing_inspect.is_union_type(typ): from . import union_field return union_field.Union( [ (", "when https://github.com/python/mypy/issues/6910 is fixed default_factory = field.default_factory # type: ignore", "metadata.get(\"default\", None) metadata[\"missing\"] = metadata.get(\"missing\", None) metadata[\"required\"] = False return", "class Anything: ... name: str ... @marshmallow.validates('name') ... 
def validates(self,", "('name', 'Eiffel Tower')]) >>> @dataclasses.dataclass() ... class Person: ... name:", "when deriving dataclass schema >>> class BaseSchema(marshmallow.Schema): ... def on_bind_field(self,", "None) nested = ( nested_schema or forward_reference or class_schema(typ, base_schema=base_schema)", "typing import ClassVar >>> from marshmallow import Schema >>> @dataclass(order=True)", ">>> # noinspection PyTypeChecker >>> class_schema(None) # unsupported type Traceback", "marshmallow field classes can be passed in the `metadata` dictionary", "}) ... >>> class_schema(Website)().load({\"url\": \"I am not a good URL", "# typing.NewType returns a function with a __supertype__ attribute newtype_supertype", "Artist.Schema <class 'marshmallow.schema.Artist'> >>> from typing import ClassVar >>> from", ") # enumerations if isinstance(typ, EnumMeta): import marshmallow_enum return marshmallow_enum.EnumField(typ,", "in (list, List): child_type = field_for_schema(arguments[0], base_schema=base_schema) list_type = type_mapping.get(List,", "many: bool = None, **kwargs): all_loaded = super().load(data, many=many, **kwargs)", "... field_obj.data_key = (field_obj.data_key or field_name).upper() >>> @add_schema(base_schema=BaseSchema) ... @dataclasses.dataclass", "class when deriving dataclass schema :return: A marshmallow Schema corresponding", ">>> from typing import ClassVar >>> from marshmallow import Schema", "unsafe_hash: bool = False, frozen: bool = False, base_schema: Optional[Type[marshmallow.Schema]]", "dataclasses.field(default_factory=lambda: []) ... >>> citySchema = class_schema(City)() >>> city =", "`metadata` dictionary of a field. If you want to use", "ignore return clazz return decorator(_cls) if _cls else decorator def", "will be created for it too. ... 
other_buildings: List[Building] =", "a marshmallow default value given a dataclass default value >>>", "elif field.default is dataclasses.MISSING: return marshmallow.missing return field.default def NewType(", "custom NewType implementation typ_args = getattr(typ, \"_marshmallow_args\", {}) # Handle", "Callable, Dict, List, Mapping, Optional, Set, Tuple, Type, TypeVar, Union,", "... TypeError: None is not a dataclass and cannot be", "`type: ignore` when mypy handles dynamic base classes # https://github.com/python/mypy/issues/2813", "\"validate\" in meta_dict: if marshmallow.utils.is_iterable_but_not_string(meta_dict[\"validate\"]): new_validators.extend(meta_dict[\"validate\"]) elif callable(meta_dict[\"validate\"]): new_validators.append(meta_dict[\"validate\"]) metadata[\"validate\"]", "= None ) -> Type[_U]: ... def add_schema(_cls=None, base_schema=None): \"\"\"", "[\"127.0.0.1\"]}) MyIps(ips=['127.0.0.1']) >>> Email = NewType('Email', str, field=marshmallow.fields.Email) >>> @dataclass", "get an OrderedDict because we specified order = True in", "function with a __supertype__ attribute newtype_supertype = getattr(typ, \"__supertype__\", None)", "import field from marshmallow_dataclass import dataclass import datetime @dataclass class", "you want to use a custom marshmallow field (one that", "'marshmallow.schema.Building'> >>> @dataclasses.dataclass() ... class City: ... name: str =", "clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None ) -> Type[marshmallow.Schema]: \"\"\"", "cast(Type[marshmallow.Schema], schema_class) def _field_by_type( typ: Union[type, Any], base_schema: Optional[Type[marshmallow.Schema]] )", "as arguments to the marshmallow Field. :param typ: The type", "@dataclass ... class Artist: ... 
name: str >>> Artist.Schema <class", "Type[_U], field: Optional[Type[marshmallow.fields.Field]] = None, **kwargs, ) -> Callable[[_U], _U]:", "\"NewType\"] NoneType = type(None) _U = TypeVar(\"_U\") # Whitelist of", "marshmallow field constructor. >>> import marshmallow.validate >>> IPv4 = NewType('IPv4',", "from marshmallow_dataclass import dataclass @dataclass class Point: x:float y:float point", "else decorator def class_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None", "import typing >>> Meters = typing.NewType('Meters', float) >>> @dataclasses.dataclass() ...", "= None ) -> Type[marshmallow.Schema]: try: # noinspection PyDataclass fields:", "a field should be generated :param default: value to use", "\"class_schema\", \"field_for_schema\", \"NewType\"] NoneType = type(None) _U = TypeVar(\"_U\") #", "of the dataclass field is used as arguments to the", "if \"validate\" in meta_dict: if marshmallow.utils.is_iterable_but_not_string(meta_dict[\"validate\"]): new_validators.extend(meta_dict[\"validate\"]) elif callable(meta_dict[\"validate\"]): new_validators.append(meta_dict[\"validate\"])", "if many is None else bool(many) if many: return [clazz(**loaded)", "Generic types origin = typing_inspect.get_origin(typ) if origin: arguments = typing_inspect.get_args(typ,", "citySchema.load({\"name\":\"Paris\", \"best_building\": {\"name\": \"Eiffel Tower\"}}) >>> city City(name='Paris', best_building=Building(height=None, name='Eiffel", "not dataclasses.MISSING: return default_factory elif field.default is dataclasses.MISSING: return marshmallow.missing", "default=default, newtype_supertype=newtype_supertype, metadata=metadata, base_schema=base_schema, ) # enumerations if isinstance(typ, EnumMeta):", "arguments ], **metadata, ) # typing.NewType returns a function with", "... height: Optional[Meters] ... name: str = dataclasses.field(default=\"anonymous\") ... 
class", "Optional[Type[marshmallow.Schema]] = None ) -> Type[marshmallow.Schema]: \"\"\" Convert a class", "@dataclass class User: birth: datetime.date = field(metadata= { \"required\": True", "base classes # https://github.com/python/mypy/issues/2813 class BaseSchema(base_schema or marshmallow.Schema): # type:", "a good URL !\"}) Traceback (most recent call last): ...", "eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen ) if _cls is None: return", "import EnumMeta from functools import lru_cache from typing import (", "dictionary. >>> import typing >>> Meters = typing.NewType('Meters', float) >>>", "clazz.Schema = class_schema(clazz, base_schema) # type: ignore return clazz return", "Person(name='Anonymous', friends=[Person(name='<NAME>', friends=[])]) >>> @dataclasses.dataclass() ... class C: ... important:", "it with an # underscore. The presence of _cls is", "expected pattern.']}} >>> MyIps.Schema().load({\"ips\": [\"127.0.0.1\"]}) MyIps(ips=['127.0.0.1']) >>> Email = NewType('Email',", "note:: All the arguments supported by marshmallow field classes can", "class Website: ... url:str = dataclasses.field(metadata = { ... \"marshmallow_field\":", ") -> Type[marshmallow.Schema]: try: # noinspection PyDataclass fields: Tuple[dataclasses.Field, ...]", "field.type, _get_field_default(field), field.metadata, base_schema ), ) for field in fields", "schema for `clazz` derived either from `base_schema` or `BaseSchema` \"\"\"", "\"unimportant\": 9 # This field will NOT be imported ...", "from . import union_field return union_field.Union( [ ( subtyp, field_for_schema(", "multiple validators from both `typ` and `metadata`. # See https://github.com/lovasoa/marshmallow_dataclass/issues/91", "decorator(_cls) if _cls else decorator def class_schema( clazz: type, base_schema:", ">>> city = citySchema.load({\"name\":\"Paris\", \"best_building\": {\"name\": \"Eiffel Tower\"}}) >>> city", "... 
}, unknown=marshmallow.EXCLUDE) >>> c C(important=9, unimportant=0) >>> @dataclasses.dataclass ...", ">>> int_field.default 9 >>> field_for_schema(str, metadata={\"marshmallow_field\": marshmallow.fields.Url()}).__class__ <class 'marshmallow.fields.Url'> \"\"\"", "... ordered = True ... >>> class_schema(Building) # Returns a", "members to contain marshmallow fields instead of dataclass fields attributes.update(", "marshmallow field (one that has no equivalent python type), you", "origin in (list, List): child_type = field_for_schema(arguments[0], base_schema=base_schema) list_type =", "default: marshmallow.missing, newtype_supertype: Type, metadata: dict, base_schema: Optional[Type[marshmallow.Schema]], ) ->", "= dataclasses.fields(clazz) except TypeError: # Not a dataclass try: return", "is None else dict(metadata) if default is not marshmallow.missing: metadata.setdefault(\"default\",", "ClassVar >>> from marshmallow import Schema >>> @dataclass(order=True) # preserve", "metadata[\"default\"] = metadata.get(\"default\", None) metadata[\"missing\"] = metadata.get(\"missing\", None) metadata[\"required\"] =", "Recursive field ... >>> person = class_schema(Person)().load({ ... \"friends\": [{\"name\":", "None) metadata[\"missing\"] = metadata.get(\"missing\", None) metadata[\"required\"] = False return field_for_schema(subtyp,", "metadata of the dataclass field is used as arguments to", "-> Type[marshmallow.Schema]: \"\"\" Convert a class to a marshmallow schema", "y=0) point_json = Point.Schema().dumps(point) Full example:: from marshmallow import Schema", "dynamic base classes # https://github.com/python/mypy/issues/2813 class BaseSchema(base_schema or marshmallow.Schema): #", "try: # noinspection PyDataclass fields: Tuple[dataclasses.Field, ...] = dataclasses.fields(clazz) except", "[] for meta_dict in (typ_args, metadata): if \"validate\" in meta_dict:", "Copy all marshmallow hooks and whitelisted members of the dataclass", "... class Website: ... 
url:str = dataclasses.field(metadata = { ...", "# decorator is being called with parameters or not. def", "TypeError: None is not a dataclass and cannot be turned", "supported by marshmallow field classes can be passed in the", ":mod:`dataclasses` to :mod:`marshmallow` schemas. It takes a python class, and", "True in the Meta class OrderedDict([('height', None), ('name', 'Eiffel Tower')])", "class C: ... important: int = dataclasses.field(init=True, default=0) ... #", "metadata={\"marshmallow_field\": marshmallow.fields.Url()}).__class__ <class 'marshmallow.fields.Url'> \"\"\" metadata = {} if metadata", "because we specified order = True in the Meta class", "Nested marshmallow dataclass nested_schema = getattr(typ, \"Schema\", None) # Nested", "field should be generated :param default: value to use for", ">>> ContactInfo.Schema().load({\"mail\": \"grumble grumble\"}) Traceback (most recent call last): ...", "the same as dataclasses.dataclass, but also applies :func:`add_schema`. It adds", "_U): return x new_type.__name__ = name new_type.__supertype__ = typ #", "marshmallow schema :param clazz: A python class (may be a", "= dataclasses.field(default_factory=lambda:[]) # Recursive field ... >>> person = class_schema(Person)().load({", "as a function, so ignore type check dc = dataclasses.dataclass(", "internally. :param type cls: The dataclass to which a Schema", "in the metadata dictionary. >>> import typing >>> Meters =", "default value given a dataclass default value >>> _get_field_default(dataclasses.field()) <marshmallow.missing>", "default_factory is not dataclasses.MISSING: return default_factory elif field.default is dataclasses.MISSING:", "if origin: arguments = typing_inspect.get_args(typ, True) # Override base_schema.TYPE_MAPPING to", "Callable[[Type[_U]], Type[_U]]: ... 
@overload def add_schema( _cls: Type[_U], base_schema: Type[marshmallow.Schema]", "ignore type check dc = dataclasses.dataclass( # type: ignore _cls,", "marshmallow.missing, newtype_supertype: Type, metadata: dict, base_schema: Optional[Type[marshmallow.Schema]], ) -> marshmallow.fields.Field:", "field = getattr(typ, \"_marshmallow_field\", None) if field: return field(**metadata) else:", "typ=typ, default=default, newtype_supertype=newtype_supertype, metadata=metadata, base_schema=base_schema, ) # enumerations if isinstance(typ,", "lambda cls: add_schema(dc(cls), base_schema) return add_schema(dc, base_schema) @overload def add_schema(_cls:", "set for required fields. if not metadata.get(\"required\"): metadata.setdefault(\"missing\", default) else:", "Point.Schema().load({'x':0, 'y':0}) # This line can be statically type checked", "field. (Usually spawned from NewType) \"\"\" # Add the information", ":mod:`marshmallow` schemas. It takes a python class, and generates a", "class Building: ... height: Optional[Meters] ... name: str = dataclasses.field(default=\"anonymous\")", "for t in arguments if t is not NoneType) #", "call last): ... marshmallow.exceptions.ValidationError: {'ips': {1: ['String does not match", "= class_schema(City)() >>> city = citySchema.load({\"name\":\"Paris\", \"best_building\": {\"name\": \"Eiffel Tower\"}})", "the Meta class OrderedDict([('height', None), ('name', 'Eiffel Tower')]) >>> @dataclasses.dataclass()", "= class_schema(clazz, base_schema) # type: ignore return clazz return decorator(_cls)", "a marshmallow schema for it. 
Simple example:: from marshmallow import", "bool = True, order: bool = False, unsafe_hash: bool =", "returns a function with a __supertype__ attribute newtype_supertype = getattr(typ,", "when deriving dataclass schema :return: A marshmallow Schema corresponding to", "a function, so ignore type check dc = dataclasses.dataclass( #", "attribute newtype_supertype = getattr(typ, \"__supertype__\", None) if newtype_supertype and inspect.isfunction(typ):", "citySchema = class_schema(City)() >>> city = citySchema.load({\"name\":\"Paris\", \"best_building\": {\"name\": \"Eiffel", "Type[_U]) -> Type[_U]: clazz.Schema = class_schema(clazz, base_schema) # type: ignore", "marshmallow_dataclass import dataclass @dataclass class Point: x:float y:float point =", "= dataclasses.field(default=\"anonymous\") ... class Meta: ... ordered = True ...", "field ... >>> person = class_schema(Person)().load({ ... \"friends\": [{\"name\": \"<NAME>\"}]", "'marshmallow.fields.Integer'> >>> int_field.default 9 >>> field_for_schema(str, metadata={\"marshmallow_field\": marshmallow.fields.Url()}).__class__ <class 'marshmallow.fields.Url'>", "@lru_cache(maxsize=MAX_CLASS_SCHEMA_CACHE_SIZE) def _proxied_class_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None )", "@dataclass class Point: x:float y:float point = Point(x=0, y=0) point_json", "recent call last): ... marshmallow.exceptions.ValidationError: {'url': ['Not a valid URL.']}", "an # underscore. The presence of _cls is used to", "to pass to marshmallow's field }) website:str = field(metadata =", "Custom marshmallow field ... }) ... >>> class_schema(Website)().load({\"url\": \"I am", "factory that creates a schema for `clazz` derived either from", "which a Schema should be added :param base_schema: marshmallow schema", "TypeVar, Union, cast, overload, ) import dataclasses import marshmallow import", "to which you can attach custom marshmallow attributes. All the", "for it. 
Simple example:: from marshmallow import Schema from marshmallow_dataclass", ">>> int_field.__class__ <class 'marshmallow.fields.Integer'> >>> int_field.default 9 >>> field_for_schema(str, metadata={\"marshmallow_field\":", "if default is not marshmallow.missing: metadata.setdefault(\"default\", default) # 'missing' must", "dataclasses.field(init=False, default=0) ... >>> c = class_schema(C)().load({ ... \"important\": 9,", "\"field_for_schema\", \"NewType\"] NoneType = type(None) _U = TypeVar(\"_U\") # Whitelist", "field_for_schema(int, default=9, metadata=dict(required=True)) >>> int_field.__class__ <class 'marshmallow.fields.Integer'> >>> int_field.default 9", "members of the dataclass to the schema. attributes = {", "See https://github.com/lovasoa/marshmallow_dataclass/issues/91 new_validators: List[Callable] = [] for meta_dict in (typ_args,", "origin: arguments = typing_inspect.get_args(typ, True) # Override base_schema.TYPE_MAPPING to change", "return clazz(**all_loaded) return BaseSchema def _get_field_default(field: dataclasses.Field): \"\"\" Return a", "grumble\"}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'mail': ['Not", "marshmallow import Schema >>> @dataclass(order=True) # preserve field order ...", "If you want to use a custom marshmallow field (one", "Type[_U]]: ... @overload def add_schema( _cls: Type[_U], base_schema: Type[marshmallow.Schema] =", "valid email address.']} \"\"\" def new_type(x: _U): return x new_type.__name__", "value): ... if len(value) > 5: raise marshmallow.ValidationError(\"Name too long\")", "dataclasses.field(default_factory=lambda:[]) # Recursive field ... 
>>> person = class_schema(Person)().load({ ...", "Tuple, Type, TypeVar, Union, cast, overload, ) import dataclasses import", "5: raise marshmallow.ValidationError(\"Name too long\") >>> class_schema(Anything)().load({\"name\": \"aaaaaargh\"}) Traceback (most", "{\"name\": \"Eiffel Tower\"}}) >>> city City(name='Paris', best_building=Building(height=None, name='Eiffel Tower'), other_buildings=[])", "... \"marshmallow_field\": marshmallow.fields.Url() # Custom marshmallow field ... }) ...", "default_factory = field.default_factory # type: ignore if default_factory is not", "from functools import lru_cache from typing import ( Any, Callable,", "... >>> Point.Schema().load({'x':0, 'y':0}) # This line can be statically", "schema will be created for it too. ... other_buildings: List[Building]", "metadata=metadata, base_schema=base_schema, ) # enumerations if isinstance(typ, EnumMeta): import marshmallow_enum", "decorator def class_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None )", "List[Any] elif typ is dict: typ = Dict[Any, Any] #", "Type[marshmallow.Schema] = None, ) -> Callable[[Type[_U]], Type[_U]]: ... @overload def", "= None, ) -> marshmallow.fields.Field: \"\"\" Get a marshmallow Field", "_cls: Type[_U], base_schema: Type[marshmallow.Schema] = None ) -> Type[_U]: ...", "field(metadata= { \"required\": True # A parameter to pass to", "[\"0.0.0.0\", \"grumble grumble\"]}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError:", "is list: typ = List[Any] elif typ is dict: typ", "... 
mail: Email = dataclasses.field(default=\"<EMAIL>\") >>> ContactInfo.Schema().load({}) ContactInfo(mail='<EMAIL>') >>> ContactInfo.Schema().load({\"mail\":", "newtype_supertype and inspect.isfunction(typ): return _field_by_supertype( typ=typ, default=default, newtype_supertype=newtype_supertype, metadata=metadata, base_schema=base_schema,", ") import dataclasses import marshmallow import typing_inspect __all__ = [\"dataclass\",", "= getattr(typ, \"__supertype__\", None) if newtype_supertype and inspect.isfunction(typ): return _field_by_supertype(", "}) >>> person Person(name='Anonymous', friends=[Person(name='<NAME>', friends=[])]) >>> @dataclasses.dataclass() ... class", "add_schema(dc(cls), base_schema) return add_schema(dc, base_schema) @overload def add_schema(_cls: Type[_U]) ->", "a Schema should be added :param base_schema: marshmallow schema used", "return marshmallow_enum.EnumField(typ, **metadata) # Nested marshmallow dataclass nested_schema = getattr(typ,", "= True, eq: bool = True, order: bool = False,", "@dataclasses.dataclass() ... class Building: ... height: Optional[Meters] ... name: str", "<class 'marshmallow.fields.Url'> \"\"\" metadata = {} if metadata is None", "-> marshmallow.fields.Field: \"\"\" Get a marshmallow Field corresponding to the", "already defined by the user predefined_field = metadata.get(\"marshmallow_field\") if predefined_field:", "cast, overload, ) import dataclasses import marshmallow import typing_inspect __all__", "union_field.Union( [ ( subtyp, field_for_schema( subtyp, metadata=metadata, base_schema=base_schema ), )", "(typ_args, metadata): if \"validate\" in meta_dict: if marshmallow.utils.is_iterable_but_not_string(meta_dict[\"validate\"]): new_validators.extend(meta_dict[\"validate\"]) elif", "schema. attributes = { k: v for k, v in", "\"marshmallow_field\": marshmallow.fields.Url() # Custom marshmallow field ... }) ... 
>>>", "example:: from marshmallow import Schema from marshmallow_dataclass import dataclass @dataclass", "True, order: bool = False, unsafe_hash: bool = False, frozen:", "a marshmallow schema :param clazz: A python class (may be", "marshmallow.exceptions.ValidationError: {'ips': {1: ['String does not match expected pattern.']}} >>>", "(most recent call last): ... marshmallow.exceptions.ValidationError: {'name': ['Name too long']}", "by keyword, so start it with an # underscore. The", "the marshmallow field constructor :param base_schema: marshmallow schema used as", "... names: Tuple[str, str] >>> artist = Artist.Schema().loads('{\"NAMES\": [\"Martin\", \"Ramirez\"]}')", "implementation typ_args = getattr(typ, \"_marshmallow_args\", {}) # Handle multiple validators", "you can attach custom marshmallow attributes. All the keyword arguments", "Tower')]) >>> @dataclasses.dataclass() ... class Person: ... name: str =", "missing :param metadata: Additional parameters to pass to the marshmallow", "= None, ): \"\"\" This decorator does the same as", "return union_field.Union( [ ( subtyp, field_for_schema( subtyp, metadata=metadata, base_schema=base_schema ),", "class_schema(clazz, base_schema) # type: ignore return clazz return decorator(_cls) if", "Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'_schema': ['never valid']}", "Any] # Base types field = _field_by_type(typ, base_schema) if field:", "# Treat optional types as types with a None default", "base_schema.TYPE_MAPPING if base_schema else {} if origin in (list, List):", "NoneType) # type: ignore # Treat optional types as types", "add_schema( base_schema: Type[marshmallow.Schema] = None, ) -> Callable[[Type[_U]], Type[_U]]: ...", "@dataclasses.dataclass() ... class C: ... 
important: int = dataclasses.field(init=True, default=0)", "# type: ignore return new_type if __name__ == \"__main__\": import", "as a base class when deriving dataclass schema :return: A", "metadata.setdefault(\"required\", True) # If the field was already defined by", "**metadata, ) # typing.NewType returns a function with a __supertype__", "List[IPv4] >>> MyIps.Schema().load({\"ips\": [\"0.0.0.0\", \"grumble grumble\"]}) Traceback (most recent call", "... ips: List[IPv4] >>> MyIps.Schema().load({\"ips\": [\"0.0.0.0\", \"grumble grumble\"]}) Traceback (most", "ContactInfo.Schema().load({\"mail\": \"grumble grumble\"}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError:", "'y':0}) # This line can be statically type checked Point(x=0.0,", "default_factory elif field.default is dataclasses.MISSING: return marshmallow.missing return field.default def", ">>> citySchema.load({\"name\":\"Paris\"}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'best_building':", "decorator(clazz: Type[_U]) -> Type[_U]: clazz.Schema = class_schema(clazz, base_schema) # type:", "passed in the `metadata` dictionary of a field. If you", "https://github.com/python/mypy/issues/2813 class BaseSchema(base_schema or marshmallow.Schema): # type: ignore def load(self,", "value to use for (de)serialization when the field is missing", "below type_mapping = base_schema.TYPE_MAPPING if base_schema else {} if origin", "type: ignore return new_type if __name__ == \"__main__\": import doctest", "@dataclass ... class MyIps: ... ips: List[IPv4] >>> MyIps.Schema().load({\"ips\": [\"0.0.0.0\",", "arg in arguments ) tuple_type = type_mapping.get(Tuple, marshmallow.fields.Tuple) return tuple_type(children,", "if new_validators else None metadata = {\"description\": typ.__name__, **typ_args, **metadata}", "... class Point: ... x:float ... y:float ... Schema: ClassVar[Type[Schema]]", "URL.']} >>> @dataclasses.dataclass ... class NeverValid: ... 
@marshmallow.validates_schema ... def", "for meta_dict in (typ_args, metadata): if \"validate\" in meta_dict: if", "field.']} >>> city_json = citySchema.dump(city) >>> city_json['best_building'] # We get", ">>> @dataclass ... class MyIps: ... ips: List[IPv4] >>> MyIps.Schema().load({\"ips\":", "= new_validators if new_validators else None metadata = {\"description\": typ.__name__,", ">>> person Person(name='Anonymous', friends=[Person(name='<NAME>', friends=[])]) >>> @dataclasses.dataclass() ... class C:", "specified without type arguments if typ is list: typ =", "attribute to the class object :param base_schema: marshmallow schema used", "in (tuple, Tuple): children = tuple( field_for_schema(arg, base_schema=base_schema) for arg", "-> Type[marshmallow.Schema]: try: # noinspection PyDataclass fields: Tuple[dataclasses.Field, ...] =", "pass it as the ``marshmallow_field`` key in the metadata dictionary.", "Schema # For the type checker \"\"\" import inspect from", "doesn't expect it to be called as a function, so", "\"\"\" def decorator(clazz: Type[_U]) -> Type[_U]: clazz.Schema = class_schema(clazz, base_schema)", "not a dataclass and cannot be turned into one.\" )", "tuple_type = type_mapping.get(Tuple, marshmallow.fields.Tuple) return tuple_type(children, **metadata) elif origin in", "for loaded in all_loaded] else: return clazz(**all_loaded) return BaseSchema def", "if typ is list: typ = List[Any] elif typ is", "base_schema) if field: return field(**metadata) if typ is Any: metadata.setdefault(\"allow_none\",", "point_json = Point.Schema().dumps(point) Full example:: from marshmallow import Schema from", "... >>> class_schema(NeverValid)().load({}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError:", "Set, Tuple, Type, TypeVar, Union, cast, overload, ) import dataclasses", "and whitelisted members of the dataclass to the schema. 
attributes", "the user predefined_field = metadata.get(\"marshmallow_field\") if predefined_field: return predefined_field #", "= dataclasses.field(metadata = { ... \"marshmallow_field\": marshmallow.fields.Url() # Custom marshmallow", "None, *, repr: bool = True, eq: bool = True,", ">>> Email = NewType('Email', str, field=marshmallow.fields.Email) >>> @dataclass ... class", "Schema >>> @dataclass(order=True) # preserve field order ... class Point:", "call last): ... TypeError: None is not a dataclass and", "False, unsafe_hash: bool = False, frozen: bool = False, base_schema:", "but also applies :func:`add_schema`. It adds a `.Schema` attribute to", "be called as a function, so ignore type check dc", "type, default=marshmallow.missing, metadata: Mapping[str, Any] = None, base_schema: Optional[Type[marshmallow.Schema]] =", "arguments passed to this function will be transmitted to the", ") -> Callable[[_U], _U]: \"\"\"NewType creates simple unique types to", "should never be specified by keyword, so start it with", "and base_schema.TYPE_MAPPING.get(typ) ) or marshmallow.Schema.TYPE_MAPPING.get(typ) def _field_by_supertype( typ: Type, default:", "else: metadata.setdefault(\"required\", True) # If the field was already defined", "be specified by keyword, so start it with an #", "= None ) -> Type[marshmallow.Schema]: \"\"\" Base schema factory that", "return _proxied_class_schema(clazz, base_schema) @lru_cache(maxsize=MAX_CLASS_SCHEMA_CACHE_SIZE) def _proxied_class_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]]", "call last): ... 
marshmallow.exceptions.ValidationError: {'mail': ['Not a valid email address.']}", "a class to a marshmallow schema :param clazz: A python", ">>> ContactInfo.Schema().load({}) ContactInfo(mail='<EMAIL>') >>> ContactInfo.Schema().load({\"mail\": \"grumble grumble\"}) Traceback (most recent", "All the arguments supported by marshmallow field classes can be", "Any] = None, base_schema: Optional[Type[marshmallow.Schema]] = None, ) -> marshmallow.fields.Field:", "name: str ... @marshmallow.validates('name') ... def validates(self, value): ... if", "of python 3.7's :mod:`dataclasses` to :mod:`marshmallow` schemas. It takes a", "Convert a class to a marshmallow schema :param clazz: A", "the schema members to contain marshmallow fields instead of dataclass", "adds a `.Schema` attribute to the class object :param base_schema:", "the information coming our custom NewType implementation typ_args = getattr(typ,", "arguments to the marshmallow Field. :param typ: The type for", "\"__marshmallow_hook__\") or k in MEMBERS_WHITELIST } # Update the schema", "9, # This field will be imported ... \"unimportant\": 9", "a valid email address.']} \"\"\" def new_type(x: _U): return x", "# Update the schema members to contain marshmallow fields instead", "\"\"\" This library allows the conversion of python 3.7's :mod:`dataclasses`", "list: typ = List[Any] elif typ is dict: typ =", "and inspect.isfunction(typ): return _field_by_supertype( typ=typ, default=default, newtype_supertype=newtype_supertype, metadata=metadata, base_schema=base_schema, )", "(most recent call last): ... 
marshmallow.exceptions.ValidationError: {'mail': ['Not a valid", "email address.']} \"\"\" def new_type(x: _U): return x new_type.__name__ =", "x:float y:float point = Point(x=0, y=0) point_json = Point.Schema().dumps(point) Full", "return tuple_type(children, **metadata) elif origin in (dict, Dict): dict_type =", "EnumMeta): import marshmallow_enum return marshmallow_enum.EnumField(typ, **metadata) # Nested marshmallow dataclass", "... important: int = dataclasses.field(init=True, default=0) ... # Only fields", "... class Artist: ... names: Tuple[str, str] >>> artist =", "deriving dataclass schema >>> @dataclass ... class Artist: ... name:", "__supertype__ attribute newtype_supertype = getattr(typ, \"__supertype__\", None) if newtype_supertype and", "metadata.get(\"required\"): metadata.setdefault(\"missing\", default) else: metadata.setdefault(\"required\", True) # If the field", "_U]: \"\"\"NewType creates simple unique types to which you can", "call last): ... marshmallow.exceptions.ValidationError: {'_schema': ['never valid']} >>> # noinspection", "types with a None default metadata[\"default\"] = metadata.get(\"default\", None) metadata[\"missing\"]", "= False, base_schema: Optional[Type[marshmallow.Schema]] = None, ): \"\"\" This decorator", "This field will NOT be imported ... }, unknown=marshmallow.EXCLUDE) >>>", "in a dataclass. It uses :func:`class_schema` internally. 
:param type cls:", "\"\"\" This decorator does the same as dataclasses.dataclass, but also", "Callable[[_U], _U]: \"\"\"NewType creates simple unique types to which you", "if field.init ) schema_class = type(clazz.__name__, (_base_schema(clazz, base_schema),), attributes) return", "class used for generic types below type_mapping = base_schema.TYPE_MAPPING if", "dict(metadata) if default is not marshmallow.missing: metadata.setdefault(\"default\", default) # 'missing'", ">>> artist = Artist.Schema().loads('{\"NAMES\": [\"Martin\", \"Ramirez\"]}') >>> artist Artist(names=('Martin', 'Ramirez'))", "\"__supertype__\", None) if newtype_supertype and inspect.isfunction(typ): return _field_by_supertype( typ=typ, default=default,", "For the type checker \"\"\" import inspect from enum import", "dataclass @dataclass class Point: x:float y:float point = Point(x=0, y=0)", "function will be transmitted to the marshmallow field constructor. >>>", "is being called with parameters or not. def dataclass( _cls:", "raise marshmallow.ValidationError('never valid') ... >>> class_schema(NeverValid)().load({}) Traceback (most recent call", "when deriving dataclass schema >>> int_field = field_for_schema(int, default=9, metadata=dict(required=True))", "as a base class when deriving dataclass schema >>> @dataclass", "... marshmallow.exceptions.ValidationError: {'ips': {1: ['String does not match expected pattern.']}}", "_U = TypeVar(\"_U\") # Whitelist of dataclass members that will", "schema used as a base class when deriving dataclass schema", "NeverValid: ... @marshmallow.validates_schema ... def validate(self, data, **_): ... raise", "metadata.setdefault(\"missing\", default) else: metadata.setdefault(\"required\", True) # If the field was", "it too. ... other_buildings: List[Building] = dataclasses.field(default_factory=lambda: []) ... >>>", "import Schema >>> @dataclass(order=True) # preserve field order ... 
class", "unique types to which you can attach custom marshmallow attributes.", "generated schema. MEMBERS_WHITELIST: Set[str] = {\"Meta\"} # Max number of", "from `base_schema` or `BaseSchema` \"\"\" # Remove `type: ignore` when", "def dataclass( _cls: Type[_U] = None, *, repr: bool =", "marshmallow dataclass nested_schema = getattr(typ, \"Schema\", None) # Nested dataclasses", "type: ignore new_type._marshmallow_args = kwargs # type: ignore return new_type", "if metadata is None else dict(metadata) if default is not", "or marshmallow.Schema.TYPE_MAPPING.get(typ) def _field_by_supertype( typ: Type, default: marshmallow.missing, newtype_supertype: Type,", "BaseSchema(base_schema or marshmallow.Schema): # type: ignore def load(self, data: Mapping,", "are in the __init__ method will be added: ... unimportant:", "if len(value) > 5: raise marshmallow.ValidationError(\"Name too long\") >>> class_schema(Anything)().load({\"name\":", "start it with an # underscore. The presence of _cls", "generic types below type_mapping = base_schema.TYPE_MAPPING if base_schema else {}", "class Point: x:float y:float point = Point(x=0, y=0) point_json =", "the dataclass to the schema. attributes = { k: v", ". import union_field return union_field.Union( [ ( subtyp, field_for_schema( subtyp,", "fixed default_factory = field.default_factory # type: ignore if default_factory is", "fields instead of dataclass fields attributes.update( ( field.name, field_for_schema( field.type,", "<class 'marshmallow.fields.Integer'> >>> int_field.default 9 >>> field_for_schema(str, metadata={\"marshmallow_field\": marshmallow.fields.Url()}).__class__ <class", "last): ... 
marshmallow.exceptions.ValidationError: {'ips': {1: ['String does not match expected", "base_schema: Optional[Type[marshmallow.Schema]] = None ) -> Type[marshmallow.Schema]: try: # noinspection", "# type: ignore new_type._marshmallow_args = kwargs # type: ignore return", "new_type.__supertype__ = typ # type: ignore new_type._marshmallow_field = field #", "too long']} \"\"\" return _proxied_class_schema(clazz, base_schema) @lru_cache(maxsize=MAX_CLASS_SCHEMA_CACHE_SIZE) def _proxied_class_schema( clazz:", "base_schema) except Exception: raise TypeError( f\"{getattr(clazz, '__name__', repr(clazz))} is not", "_field_by_type(typ, base_schema) if field: return field(**metadata) if typ is Any:", "presence of _cls is used to detect if this #", "attach custom marshmallow attributes. All the keyword arguments passed to", "dataclasses.fields(clazz) except TypeError: # Not a dataclass try: return class_schema(dataclasses.dataclass(clazz),", "custom marshmallow attributes. All the keyword arguments passed to this", "which a field should be generated :param default: value to", "(one that has no equivalent python type), you can pass", "int_field = field_for_schema(int, default=9, metadata=dict(required=True)) >>> int_field.__class__ <class 'marshmallow.fields.Integer'> >>>", "= getattr(typ, \"Schema\", None) # Nested dataclasses forward_reference = getattr(typ,", "@dataclass ... class ContactInfo: ... 
mail: Email = dataclasses.field(default=\"<EMAIL>\") >>>", "marshmallow.Schema.TYPE_MAPPING.get(typ) def _field_by_supertype( typ: Type, default: marshmallow.missing, newtype_supertype: Type, metadata:", "None, **kwargs, ) -> Callable[[_U], _U]: \"\"\"NewType creates simple unique", "# This line can be statically type checked Point(x=0.0, y=0.0)", "= False return field_for_schema(subtyp, metadata=metadata, base_schema=base_schema) elif typing_inspect.is_union_type(typ): from .", "Any, Callable, Dict, List, Mapping, Optional, Set, Tuple, Type, TypeVar,", "list_type(child_type, **metadata) if origin in (tuple, Tuple): children = tuple(", "typing.NewType returns a function with a __supertype__ attribute newtype_supertype =", "(most recent call last): ... marshmallow.exceptions.ValidationError: {'_schema': ['never valid']} >>>", "repr: bool = True, eq: bool = True, order: bool", "base_schema=base_schema ), ) for subtyp in arguments ], **metadata, )", "base_schema=base_schema), values=field_for_schema(arguments[1], base_schema=base_schema), **metadata, ) elif typing_inspect.is_optional_type(typ): subtyp = next(t", "... x:float ... y:float ... Schema: ClassVar[Type[Schema]] = Schema #", "\"\"\" import inspect from enum import EnumMeta from functools import", ">>> class_schema(Building) # Returns a marshmallow schema class (not an", "valid') ... >>> class_schema(NeverValid)().load({}) Traceback (most recent call last): ...", "marshmallow fields instead of dataclass fields attributes.update( ( field.name, field_for_schema(", "(field_obj.data_key or field_name).upper() >>> @add_schema(base_schema=BaseSchema) ... @dataclasses.dataclass ... class Artist:", "metadata is None else dict(metadata) if default is not marshmallow.missing:", "ContactInfo(mail='<EMAIL>') >>> ContactInfo.Schema().load({\"mail\": \"grumble grumble\"}) Traceback (most recent call last):", ") -> Callable[[Type[_U]], Type[_U]]: ... 
@overload def add_schema( _cls: Type[_U],", "newtype_supertype=newtype_supertype, metadata=metadata, base_schema=base_schema, ) # enumerations if isinstance(typ, EnumMeta): import", "( base_schema and base_schema.TYPE_MAPPING.get(typ) ) or marshmallow.Schema.TYPE_MAPPING.get(typ) def _field_by_supertype( typ:", "a base class when deriving dataclass schema >>> int_field =", "= class_schema(C)().load({ ... \"important\": 9, # This field will be", "field in fields if field.init ) schema_class = type(clazz.__name__, (_base_schema(clazz,", "city_json['best_building'] # We get an OrderedDict because we specified order", "( nested_schema or forward_reference or class_schema(typ, base_schema=base_schema) ) return marshmallow.fields.Nested(nested,", "an OrderedDict because we specified order = True in the", "C: ... important: int = dataclasses.field(init=True, default=0) ... # Only", "@marshmallow.validates('name') ... def validates(self, value): ... if len(value) > 5:", "return default_factory elif field.default is dataclasses.MISSING: return marshmallow.missing return field.default", "\"\"\" # Remove `type: ignore` when mypy handles dynamic base", "grumble\"]}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'ips': {1:", "dataclasses. A schema will be created for it too. ...", "field_for_schema(str, metadata={\"marshmallow_field\": marshmallow.fields.Url()}).__class__ <class 'marshmallow.fields.Url'> \"\"\" metadata = {} if", "base_schema: Type[marshmallow.Schema] = None, ) -> Callable[[Type[_U]], Type[_U]]: ... @overload", "(most recent call last): ... marshmallow.exceptions.ValidationError: {'url': ['Not a valid", "NoneType = type(None) _U = TypeVar(\"_U\") # Whitelist of dataclass", "= [\"dataclass\", \"add_schema\", \"class_schema\", \"field_for_schema\", \"NewType\"] NoneType = type(None) _U", "A schema will be created for it too. ... other_buildings:", "... name: str = dataclasses.field(metadata={'required':True}) ... 
best_building: Building # Reference", "you can pass it as the ``marshmallow_field`` key in the", "marshmallow.validate >>> IPv4 = NewType('IPv4', str, validate=marshmallow.validate.Regexp(r'^([0-9]{1,3}\\\\.){3}[0-9]{1,3}$')) >>> @dataclass ...", "= None, ) -> Callable[[Type[_U]], Type[_U]]: ... @overload def add_schema(", "None) metadata[\"required\"] = False return field_for_schema(subtyp, metadata=metadata, base_schema=base_schema) elif typing_inspect.is_union_type(typ):", "Optional[Type[marshmallow.Schema]] = None, ): \"\"\" This decorator does the same", "Whitelist of dataclass members that will be copied to generated", "... \"important\": 9, # This field will be imported ...", "metadata dictionary. >>> import typing >>> Meters = typing.NewType('Meters', float)", "{'mail': ['Not a valid email address.']} \"\"\" def new_type(x: _U):", "a super field. (Usually spawned from NewType) \"\"\" # Add", "fields. if not metadata.get(\"required\"): metadata.setdefault(\"missing\", default) else: metadata.setdefault(\"required\", True) #", ">>> @dataclasses.dataclass ... class Website: ... url:str = dataclasses.field(metadata =", "(list, List): child_type = field_for_schema(arguments[0], base_schema=base_schema) list_type = type_mapping.get(List, marshmallow.fields.List)", "kwargs # type: ignore return new_type if __name__ == \"__main__\":", "marshmallow_enum return marshmallow_enum.EnumField(typ, **metadata) # Nested marshmallow dataclass nested_schema =", "= dataclasses.field(metadata={'required':True}) ... 
best_building: Building # Reference to another dataclasses.", "new_validators else None metadata = {\"description\": typ.__name__, **typ_args, **metadata} field", "marshmallow.exceptions.ValidationError: {'mail': ['Not a valid email address.']} \"\"\" def new_type(x:", "cls: add_schema(dc(cls), base_schema) return add_schema(dc, base_schema) @overload def add_schema(_cls: Type[_U])", "field(**metadata) else: return field_for_schema( newtype_supertype, metadata=metadata, default=default, base_schema=base_schema, ) def", "\"Ramirez\"]}') >>> artist Artist(names=('Martin', 'Ramirez')) \"\"\" def decorator(clazz: Type[_U]) ->", "field will NOT be imported ... }, unknown=marshmallow.EXCLUDE) >>> c", "y:float ... Schema: ClassVar[Type[Schema]] = Schema # For the type", "str, typ: Type[_U], field: Optional[Type[marshmallow.fields.Field]] = None, **kwargs, ) ->", "name: str, typ: Type[_U], field: Optional[Type[marshmallow.fields.Field]] = None, **kwargs, )", "= typ # type: ignore new_type._marshmallow_field = field # type:", "Schema: ClassVar[Type[Schema]] = Schema # For the type checker \"\"\"", "and cannot be turned into one.\" ) # Copy all", "... @overload def add_schema( _cls: Type[_U], base_schema: Type[marshmallow.Schema] = None", "= {\"description\": typ.__name__, **typ_args, **metadata} field = getattr(typ, \"_marshmallow_field\", None)", "marshmallow attributes. All the keyword arguments passed to this function", "from typing import ClassVar >>> from marshmallow import Schema >>>", "best_building=Building(height=None, name='Eiffel Tower'), other_buildings=[]) >>> citySchema.load({\"name\":\"Paris\"}) Traceback (most recent call", "# Add the information coming our custom NewType implementation typ_args", "PyTypeChecker >>> class_schema(None) # unsupported type Traceback (most recent call", "the dataclass .. 
note:: All the arguments supported by marshmallow", "pattern.']}} >>> MyIps.Schema().load({\"ips\": [\"127.0.0.1\"]}) MyIps(ips=['127.0.0.1']) >>> Email = NewType('Email', str,", "marshmallow Field corresponding to the given python type. The metadata", "class_schema keeps of generated schemas. Removes duplicates. MAX_CLASS_SCHEMA_CACHE_SIZE = 1024", "... class C: ... important: int = dataclasses.field(init=True, default=0) ...", "[clazz(**loaded) for loaded in all_loaded] else: return clazz(**all_loaded) return BaseSchema", ">>> MyIps.Schema().load({\"ips\": [\"0.0.0.0\", \"grumble grumble\"]}) Traceback (most recent call last):", "None default metadata[\"default\"] = metadata.get(\"default\", None) metadata[\"missing\"] = metadata.get(\"missing\", None)", "we specified order = True in the Meta class OrderedDict([('height',", "typ_args = getattr(typ, \"_marshmallow_args\", {}) # Handle multiple validators from", "None, base_schema: Optional[Type[marshmallow.Schema]] = None, ) -> marshmallow.fields.Field: \"\"\" Get", "union_field return union_field.Union( [ ( subtyp, field_for_schema( subtyp, metadata=metadata, base_schema=base_schema", "type: ignore # Treat optional types as types with a", "types to which you can attach custom marshmallow attributes. 
All", "dataclass and cannot be turned into one.\" ) # Copy", "Point: x:float y:float point = Point(x=0, y=0) point_json = Point.Schema().dumps(point)", "in MEMBERS_WHITELIST } # Update the schema members to contain", "callable(meta_dict[\"validate\"]): new_validators.append(meta_dict[\"validate\"]) metadata[\"validate\"] = new_validators if new_validators else None metadata", "{'ips': {1: ['String does not match expected pattern.']}} >>> MyIps.Schema().load({\"ips\":", "base_schema: marshmallow schema used as a base class when deriving", "List[Callable] = [] for meta_dict in (typ_args, metadata): if \"validate\"", "predefined_field: return predefined_field # Generic types specified without type arguments", "class OrderedDict([('height', None), ('name', 'Eiffel Tower')]) >>> @dataclasses.dataclass() ... class", "base_schema=base_schema) elif typing_inspect.is_union_type(typ): from . import union_field return union_field.Union( [", "instance) <class 'marshmallow.schema.Building'> >>> @dataclasses.dataclass() ... class City: ... name:", "value >>> _get_field_default(dataclasses.field()) <marshmallow.missing> \"\"\" # Remove `type: ignore` when", "return list_type(child_type, **metadata) if origin in (tuple, Tuple): children =", "and generates a marshmallow schema for it. Simple example:: from", "-> marshmallow.fields.Field: \"\"\" Return a new field for fields based", "{\"description\": typ.__name__, **typ_args, **metadata} field = getattr(typ, \"_marshmallow_field\", None) if", "dataclass( _cls: Type[_U] = None, *, repr: bool = True,", "into one.\" ) # Copy all marshmallow hooks and whitelisted", "Optional[Type[marshmallow.Schema]], ) -> marshmallow.fields.Field: \"\"\" Return a new field for", "used as arguments to the marshmallow Field. :param typ: The", "predefined_field # Generic types specified without type arguments if typ", "decorator is being called with parameters or not. 
def dataclass(", "can pass it as the ``marshmallow_field`` key in the metadata", "dict_type = type_mapping.get(Dict, marshmallow.fields.Dict) return dict_type( keys=field_for_schema(arguments[0], base_schema=base_schema), values=field_for_schema(arguments[1], base_schema=base_schema),", "parameters to pass to the marshmallow field constructor :param base_schema:", ":param type cls: The dataclass to which a Schema should", "a new field for fields based on a super field.", "Treat optional types as types with a None default metadata[\"default\"]", "= _field_by_type(typ, base_schema) if field: return field(**metadata) if typ is", "# Generic types specified without type arguments if typ is", "'marshmallow.schema.Artist'> >>> from typing import ClassVar >>> from marshmallow import", "artist = Artist.Schema().loads('{\"NAMES\": [\"Martin\", \"Ramirez\"]}') >>> artist Artist(names=('Martin', 'Ramirez')) \"\"\"", "c C(important=9, unimportant=0) >>> @dataclasses.dataclass ... class Website: ... url:str", "be turned into one. >>> @dataclasses.dataclass ... class Anything: ...", "= True, order: bool = False, unsafe_hash: bool = False,", "hasattr(v, \"__marshmallow_hook__\") or k in MEMBERS_WHITELIST } # Update the", "overload, ) import dataclasses import marshmallow import typing_inspect __all__ =", ">>> c = class_schema(C)().load({ ... \"important\": 9, # This field", "= List[Any] elif typ is dict: typ = Dict[Any, Any]", "class ContactInfo: ... mail: Email = dataclasses.field(default=\"<EMAIL>\") >>> ContactInfo.Schema().load({}) ContactInfo(mail='<EMAIL>')", "ignore if default_factory is not dataclasses.MISSING: return default_factory elif field.default", "import ( Any, Callable, Dict, List, Mapping, Optional, Set, Tuple,", ">>> class_schema(None) # unsupported type Traceback (most recent call last):", "be created for it too. ... other_buildings: List[Building] = dataclasses.field(default_factory=lambda:", "friends=[])]) >>> @dataclasses.dataclass() ... class C: ... 
important: int =", "(_base_schema(clazz, base_schema),), attributes) return cast(Type[marshmallow.Schema], schema_class) def _field_by_type( typ: Union[type,", "Return a marshmallow default value given a dataclass default value", "= getattr(typ, \"_marshmallow_args\", {}) # Handle multiple validators from both", "is not NoneType) # type: ignore # Treat optional types", "schema class (not an instance) <class 'marshmallow.schema.Building'> >>> @dataclasses.dataclass() ...", "simple unique types to which you can attach custom marshmallow", "in fields if field.init ) schema_class = type(clazz.__name__, (_base_schema(clazz, base_schema),),", "(may be a dataclass) :param base_schema: marshmallow schema used as", "field for fields based on a super field. (Usually spawned", "elif typing_inspect.is_optional_type(typ): subtyp = next(t for t in arguments if", "dict: typ = Dict[Any, Any] # Base types field =", "{'best_building': ['Missing data for required field.']} >>> city_json = citySchema.dump(city)", "typ: type, default=marshmallow.missing, metadata: Mapping[str, Any] = None, base_schema: Optional[Type[marshmallow.Schema]]", "`type: ignore` when https://github.com/python/mypy/issues/6910 is fixed default_factory = field.default_factory #", "marshmallow.fields.List) return list_type(child_type, **metadata) if origin in (tuple, Tuple): children", "so ignore type check dc = dataclasses.dataclass( # type: ignore", "field. If you want to use a custom marshmallow field", "a field. 
If you want to use a custom marshmallow", "Type, default: marshmallow.missing, newtype_supertype: Type, metadata: dict, base_schema: Optional[Type[marshmallow.Schema]], )", "k: v for k, v in inspect.getmembers(clazz) if hasattr(v, \"__marshmallow_hook__\")", "Optional[Type[marshmallow.fields.Field]] = None, **kwargs, ) -> Callable[[_U], _U]: \"\"\"NewType creates", "Schema: ClassVar[Type[Schema]] = Schema # For the type checker ...", "a custom marshmallow field (one that has no equivalent python", ">>> MyIps.Schema().load({\"ips\": [\"127.0.0.1\"]}) MyIps(ips=['127.0.0.1']) >>> Email = NewType('Email', str, field=marshmallow.fields.Email)", "!\"}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'url': ['Not", "call last): ... marshmallow.exceptions.ValidationError: {'best_building': ['Missing data for required field.']}", "MyIps.Schema().load({\"ips\": [\"0.0.0.0\", \"grumble grumble\"]}) Traceback (most recent call last): ...", "Max number of generated schemas that class_schema keeps of generated", "newtype_supertype: Type, metadata: dict, base_schema: Optional[Type[marshmallow.Schema]], ) -> marshmallow.fields.Field: \"\"\"", "return dict_type( keys=field_for_schema(arguments[0], base_schema=base_schema), values=field_for_schema(arguments[1], base_schema=base_schema), **metadata, ) elif typing_inspect.is_optional_type(typ):", "applies :func:`add_schema`. It adds a `.Schema` attribute to the class", "fields that are in the __init__ method will be added:", "\"aaaaaargh\"}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'name': ['Name", "# Nested dataclasses forward_reference = getattr(typ, \"__forward_arg__\", None) nested =", "name new_type.__supertype__ = typ # type: ignore new_type._marshmallow_field = field", "...] = dataclasses.fields(clazz) except TypeError: # Not a dataclass try:", "... }) ... 
>>> class_schema(Website)().load({\"url\": \"I am not a good", "# type: ignore def load(self, data: Mapping, *, many: bool", "schema >>> @dataclass ... class Artist: ... name: str >>>", "NewType) \"\"\" # Add the information coming our custom NewType", "height: Optional[Meters] ... name: str = dataclasses.field(default=\"anonymous\") ... class Meta:", "= citySchema.load({\"name\":\"Paris\", \"best_building\": {\"name\": \"Eiffel Tower\"}}) >>> city City(name='Paris', best_building=Building(height=None,", "Override base_schema.TYPE_MAPPING to change the class used for generic types", "getattr(typ, \"Schema\", None) # Nested dataclasses forward_reference = getattr(typ, \"__forward_arg__\",", ">>> @add_schema(base_schema=BaseSchema) ... @dataclasses.dataclass ... class Artist: ... names: Tuple[str,", "dataclasses.MISSING: return default_factory elif field.default is dataclasses.MISSING: return marshmallow.missing return", "to the given python type. The metadata of the dataclass", "= typing_inspect.get_origin(typ) if origin: arguments = typing_inspect.get_args(typ, True) # Override", "'__name__', repr(clazz))} is not a dataclass and cannot be turned", "spawned from NewType) \"\"\" # Add the information coming our", "class NeverValid: ... @marshmallow.validates_schema ... 
def validate(self, data, **_): ...", "dataclasses.dataclass( # type: ignore _cls, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen", "[\"Martin\", \"Ramirez\"]}') >>> artist Artist(names=('Martin', 'Ramirez')) \"\"\" def decorator(clazz: Type[_U])", "with a __supertype__ attribute newtype_supertype = getattr(typ, \"__supertype__\", None) if", "added :param base_schema: marshmallow schema used as a base class", "( field.name, field_for_schema( field.type, _get_field_default(field), field.metadata, base_schema ), ) for", "attributes.update( ( field.name, field_for_schema( field.type, _get_field_default(field), field.metadata, base_schema ), )", "default=default, base_schema=base_schema, ) def field_for_schema( typ: type, default=marshmallow.missing, metadata: Mapping[str,", "class MyIps: ... ips: List[IPv4] >>> MyIps.Schema().load({\"ips\": [\"0.0.0.0\", \"grumble grumble\"]})", "metadata.setdefault(\"allow_none\", True) return marshmallow.fields.Raw(**metadata) # Generic types origin = typing_inspect.get_origin(typ)", "dataclasses.field(default=\"anonymous\") ... class Meta: ... ordered = True ... >>>", "last): ... marshmallow.exceptions.ValidationError: {'best_building': ['Missing data for required field.']} >>>", "... def on_bind_field(self, field_name, field_obj): ... field_obj.data_key = (field_obj.data_key or", "to the dataclass .. note:: All the arguments supported by", "parameters or not. def dataclass( _cls: Type[_U] = None, *,", ">>> class_schema(Anything)().load({\"name\": \"aaaaaargh\"}) Traceback (most recent call last): ... 
marshmallow.exceptions.ValidationError:", "for k, v in inspect.getmembers(clazz) if hasattr(v, \"__marshmallow_hook__\") or k", "else: return clazz(**all_loaded) return BaseSchema def _get_field_default(field: dataclasses.Field): \"\"\" Return", "return lambda cls: add_schema(dc(cls), base_schema) return add_schema(dc, base_schema) @overload def", "marshmallow.fields.Tuple) return tuple_type(children, **metadata) elif origin in (dict, Dict): dict_type", "marshmallow default value given a dataclass default value >>> _get_field_default(dataclasses.field())", "str, validate=marshmallow.validate.Regexp(r'^([0-9]{1,3}\\\\.){3}[0-9]{1,3}$')) >>> @dataclass ... class MyIps: ... ips: List[IPv4]", "Field. :param typ: The type for which a field should", "base_schema) @lru_cache(maxsize=MAX_CLASS_SCHEMA_CACHE_SIZE) def _proxied_class_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None", "... other_buildings: List[Building] = dataclasses.field(default_factory=lambda: []) ... >>> citySchema =", "type for which a field should be generated :param default:", "python type), you can pass it as the ``marshmallow_field`` key", "\"best_building\": {\"name\": \"Eiffel Tower\"}}) >>> city City(name='Paris', best_building=Building(height=None, name='Eiffel Tower'),", "fields if field.init ) schema_class = type(clazz.__name__, (_base_schema(clazz, base_schema),), attributes)", "name: str >>> Artist.Schema <class 'marshmallow.schema.Artist'> >>> from typing import", "a base class when deriving dataclass schema >>> class BaseSchema(marshmallow.Schema):", "city_json = citySchema.dump(city) >>> city_json['best_building'] # We get an OrderedDict", "raise TypeError( f\"{getattr(clazz, '__name__', repr(clazz))} is not a dataclass and", "a dataclass. It uses :func:`class_schema` internally. 
:param type cls: The", "field.init ) schema_class = type(clazz.__name__, (_base_schema(clazz, base_schema),), attributes) return cast(Type[marshmallow.Schema],", "that class_schema keeps of generated schemas. Removes duplicates. MAX_CLASS_SCHEMA_CACHE_SIZE =", "marshmallow.fields.Url() # Custom marshmallow field }) Schema: ClassVar[Type[Schema]] = Schema", "Artist(names=('Martin', 'Ramirez')) \"\"\" def decorator(clazz: Type[_U]) -> Type[_U]: clazz.Schema =", "website:str = field(metadata = { \"marshmallow_field\": marshmallow.fields.Url() # Custom marshmallow", "call last): ... marshmallow.exceptions.ValidationError: {'name': ['Name too long']} \"\"\" return", "to the marshmallow Field. :param typ: The type for which", "= field(metadata = { \"marshmallow_field\": marshmallow.fields.Url() # Custom marshmallow field", "other_buildings: List[Building] = dataclasses.field(default_factory=lambda: []) ... >>> citySchema = class_schema(City)()", "adds a marshmallow schema as the 'Schema' attribute in a", "will be added: ... unimportant: int = dataclasses.field(init=False, default=0) ...", "if newtype_supertype and inspect.isfunction(typ): return _field_by_supertype( typ=typ, default=default, newtype_supertype=newtype_supertype, metadata=metadata,", "field_obj.data_key = (field_obj.data_key or field_name).upper() >>> @add_schema(base_schema=BaseSchema) ... 
@dataclasses.dataclass ...", "specified by keyword, so start it with an # underscore.", "str] >>> artist = Artist.Schema().loads('{\"NAMES\": [\"Martin\", \"Ramirez\"]}') >>> artist Artist(names=('Martin',", "mypy handles dynamic base classes # https://github.com/python/mypy/issues/2813 class BaseSchema(base_schema or", "new_type.__name__ = name new_type.__supertype__ = typ # type: ignore new_type._marshmallow_field", "= type(None) _U = TypeVar(\"_U\") # Whitelist of dataclass members", "None) if newtype_supertype and inspect.isfunction(typ): return _field_by_supertype( typ=typ, default=default, newtype_supertype=newtype_supertype,", "# Base types field = _field_by_type(typ, base_schema) if field: return", "new_validators if new_validators else None metadata = {\"description\": typ.__name__, **typ_args,", "new_type(x: _U): return x new_type.__name__ = name new_type.__supertype__ = typ", "artist Artist(names=('Martin', 'Ramirez')) \"\"\" def decorator(clazz: Type[_U]) -> Type[_U]: clazz.Schema", "\"\"\" return _proxied_class_schema(clazz, base_schema) @lru_cache(maxsize=MAX_CLASS_SCHEMA_CACHE_SIZE) def _proxied_class_schema( clazz: type, base_schema:", ">>> @dataclasses.dataclass ... class Anything: ... name: str ... @marshmallow.validates('name')", "validate=marshmallow.validate.Regexp(r'^([0-9]{1,3}\\\\.){3}[0-9]{1,3}$')) >>> @dataclass ... class MyIps: ... ips: List[IPv4] >>>", "python class, and generates a marshmallow schema for it. Simple", "typ = List[Any] elif typ is dict: typ = Dict[Any,", "in the `metadata` dictionary of a field. If you want", "information coming our custom NewType implementation typ_args = getattr(typ, \"_marshmallow_args\",", "{} if metadata is None else dict(metadata) if default is", "a dataclass and cannot be turned into one. 
>>> @dataclasses.dataclass", "try: return class_schema(dataclasses.dataclass(clazz), base_schema) except Exception: raise TypeError( f\"{getattr(clazz, '__name__',", "type check dc = dataclasses.dataclass( # type: ignore _cls, repr=repr,", "class (may be a dataclass) :param base_schema: marshmallow schema used", "return cast(Type[marshmallow.Schema], schema_class) def _field_by_type( typ: Union[type, Any], base_schema: Optional[Type[marshmallow.Schema]]", "x:float ... y:float ... Schema: ClassVar[Type[Schema]] = Schema # For", "field(metadata = { \"marshmallow_field\": marshmallow.fields.Url() # Custom marshmallow field })", "generated schemas that class_schema keeps of generated schemas. Removes duplicates.", ":param typ: The type for which a field should be", "schema members to contain marshmallow fields instead of dataclass fields", "type checked Point(x=0.0, y=0.0) \"\"\" # dataclass's typing doesn't expect", "base_schema ), ) for field in fields if field.init )", "_get_field_default(dataclasses.field()) <marshmallow.missing> \"\"\" # Remove `type: ignore` when https://github.com/python/mypy/issues/6910 is", "marshmallow field constructor :param base_schema: marshmallow schema used as a", "keyword arguments passed to this function will be transmitted to", "elif callable(meta_dict[\"validate\"]): new_validators.append(meta_dict[\"validate\"]) metadata[\"validate\"] = new_validators if new_validators else None", "All the keyword arguments passed to this function will be", "recent call last): ... 
marshmallow.exceptions.ValidationError: {'ips': {1: ['String does not", "raise marshmallow.ValidationError(\"Name too long\") >>> class_schema(Anything)().load({\"name\": \"aaaaaargh\"}) Traceback (most recent", "User: birth: datetime.date = field(metadata= { \"required\": True # A", "field(**metadata) if typ is Any: metadata.setdefault(\"allow_none\", True) return marshmallow.fields.Raw(**metadata) #", "or k in MEMBERS_WHITELIST } # Update the schema members", "nested_schema = getattr(typ, \"Schema\", None) # Nested dataclasses forward_reference =", "None ) -> Type[_U]: ... def add_schema(_cls=None, base_schema=None): \"\"\" This", "dataclasses.field(init=True, default=0) ... # Only fields that are in the", "dataclasses forward_reference = getattr(typ, \"__forward_arg__\", None) nested = ( nested_schema", "child_type = field_for_schema(arguments[0], base_schema=base_schema) list_type = type_mapping.get(List, marshmallow.fields.List) return list_type(child_type,", "@overload def add_schema( base_schema: Type[marshmallow.Schema] = None, ) -> Callable[[Type[_U]],", "**metadata) # Nested marshmallow dataclass nested_schema = getattr(typ, \"Schema\", None)", "can be statically type checked Point(x=0.0, y=0.0) \"\"\" # dataclass's", "int = dataclasses.field(init=True, default=0) ... # Only fields that are", "classes can be passed in the `metadata` dictionary of a", "ContactInfo.Schema().load({}) ContactInfo(mail='<EMAIL>') >>> ContactInfo.Schema().load({\"mail\": \"grumble grumble\"}) Traceback (most recent call", "# Custom marshmallow field ... }) ... >>> class_schema(Website)().load({\"url\": \"I", "be added: ... unimportant: int = dataclasses.field(init=False, default=0) ... 
>>>", "dataclasses.Field): \"\"\" Return a marshmallow default value given a dataclass", "bool = True, eq: bool = True, order: bool =", "= next(t for t in arguments if t is not", "Union, cast, overload, ) import dataclasses import marshmallow import typing_inspect", "marshmallow.ValidationError(\"Name too long\") >>> class_schema(Anything)().load({\"name\": \"aaaaaargh\"}) Traceback (most recent call", "classes # https://github.com/python/mypy/issues/2813 class BaseSchema(base_schema or marshmallow.Schema): # type: ignore", "t in arguments if t is not NoneType) # type:", "= NewType('Email', str, field=marshmallow.fields.Email) >>> @dataclass ... class ContactInfo: ...", "typing_inspect.get_args(typ, True) # Override base_schema.TYPE_MAPPING to change the class used", "a dataclass try: return class_schema(dataclasses.dataclass(clazz), base_schema) except Exception: raise TypeError(", "... # Only fields that are in the __init__ method", "Remove `type: ignore` when https://github.com/python/mypy/issues/6910 is fixed default_factory = field.default_factory", "Mapping[str, Any] = None, base_schema: Optional[Type[marshmallow.Schema]] = None, ) ->", "type: ignore if default_factory is not dataclasses.MISSING: return default_factory elif", "ordered = True ... >>> class_schema(Building) # Returns a marshmallow", "noinspection PyDataclass fields: Tuple[dataclasses.Field, ...] = dataclasses.fields(clazz) except TypeError: #", "a marshmallow schema as the 'Schema' attribute in a dataclass.", "is used to detect if this # decorator is being", "= field.default_factory # type: ignore if default_factory is not dataclasses.MISSING:", "List[Building] = dataclasses.field(default_factory=lambda: []) ... 
>>> citySchema = class_schema(City)() >>>", "does not match expected pattern.']}} >>> MyIps.Schema().load({\"ips\": [\"127.0.0.1\"]}) MyIps(ips=['127.0.0.1']) >>>", "class_schema(City)() >>> city = citySchema.load({\"name\":\"Paris\", \"best_building\": {\"name\": \"Eiffel Tower\"}}) >>>", "\"\"\" Return a marshmallow default value given a dataclass default", "field # type: ignore new_type._marshmallow_args = kwargs # type: ignore", "next(t for t in arguments if t is not NoneType)", "object :param base_schema: marshmallow schema used as a base class", "base_schema and base_schema.TYPE_MAPPING.get(typ) ) or marshmallow.Schema.TYPE_MAPPING.get(typ) def _field_by_supertype( typ: Type,", "Schema from dataclasses import field from marshmallow_dataclass import dataclass import", "= super().load(data, many=many, **kwargs) many = self.many if many is", "another dataclasses. A schema will be created for it too.", "_proxied_class_schema(clazz, base_schema) @lru_cache(maxsize=MAX_CLASS_SCHEMA_CACHE_SIZE) def _proxied_class_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] =", "**metadata} field = getattr(typ, \"_marshmallow_field\", None) if field: return field(**metadata)", "Optional, Set, Tuple, Type, TypeVar, Union, cast, overload, ) import", "ignore new_type._marshmallow_args = kwargs # type: ignore return new_type if", "marshmallow.utils.is_iterable_but_not_string(meta_dict[\"validate\"]): new_validators.extend(meta_dict[\"validate\"]) elif callable(meta_dict[\"validate\"]): new_validators.append(meta_dict[\"validate\"]) metadata[\"validate\"] = new_validators if new_validators", "creates simple unique types to which you can attach custom", "None) if field: return field(**metadata) else: return field_for_schema( newtype_supertype, metadata=metadata,", "an instance) <class 'marshmallow.schema.Building'> >>> @dataclasses.dataclass() ... 
class City: ...", "def new_type(x: _U): return x new_type.__name__ = name new_type.__supertype__ =", "TypeVar(\"_U\") # Whitelist of dataclass members that will be copied", "List): child_type = field_for_schema(arguments[0], base_schema=base_schema) list_type = type_mapping.get(List, marshmallow.fields.List) return", "... y:float ... Schema: ClassVar[Type[Schema]] = Schema # For the", "# type: ignore # Treat optional types as types with", "\"\"\" # Remove `type: ignore` when https://github.com/python/mypy/issues/6910 is fixed default_factory", "base class when deriving dataclass schema >>> class BaseSchema(marshmallow.Schema): ...", "not be set for required fields. if not metadata.get(\"required\"): metadata.setdefault(\"missing\",", "field_for_schema(arg, base_schema=base_schema) for arg in arguments ) tuple_type = type_mapping.get(Tuple,", "if base_schema else {} if origin in (list, List): child_type", "\"grumble grumble\"]}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'ips':", "pass to marshmallow's field }) website:str = field(metadata = {", "field: return field(**metadata) else: return field_for_schema( newtype_supertype, metadata=metadata, default=default, base_schema=base_schema,", "= tuple( field_for_schema(arg, base_schema=base_schema) for arg in arguments ) tuple_type", "# Max number of generated schemas that class_schema keeps of", "_field_by_supertype( typ=typ, default=default, newtype_supertype=newtype_supertype, metadata=metadata, base_schema=base_schema, ) # enumerations if", "_base_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None ) -> Type[marshmallow.Schema]:", "default=9, metadata=dict(required=True)) >>> int_field.__class__ <class 'marshmallow.fields.Integer'> >>> int_field.default 9 >>>", "return marshmallow.fields.Nested(nested, **metadata) def _base_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] =", "not. 
def dataclass( _cls: Type[_U] = None, *, repr: bool", "return [clazz(**loaded) for loaded in all_loaded] else: return clazz(**all_loaded) return", "OrderedDict([('height', None), ('name', 'Eiffel Tower')]) >>> @dataclasses.dataclass() ... class Person:", "= Schema # For the type checker \"\"\" import inspect", ") -> Type[marshmallow.Schema]: \"\"\" Base schema factory that creates a", "Optional[Type[marshmallow.Schema]] ) -> Optional[Type[marshmallow.fields.Field]]: return ( base_schema and base_schema.TYPE_MAPPING.get(typ) )", "Optional[Type[marshmallow.Schema]] = None ) -> Type[marshmallow.Schema]: \"\"\" Base schema factory", "ignore new_type._marshmallow_field = field # type: ignore new_type._marshmallow_args = kwargs", "): \"\"\" This decorator does the same as dataclasses.dataclass, but", ") -> marshmallow.fields.Field: \"\"\" Return a new field for fields", "metadata: Mapping[str, Any] = None, base_schema: Optional[Type[marshmallow.Schema]] = None, )", "field_for_schema( subtyp, metadata=metadata, base_schema=base_schema ), ) for subtyp in arguments", "[{\"name\": \"<NAME>\"}] ... }) >>> person Person(name='Anonymous', friends=[Person(name='<NAME>', friends=[])]) >>>", "[]) ... >>> citySchema = class_schema(City)() >>> city = citySchema.load({\"name\":\"Paris\",", "Type[marshmallow.Schema]: \"\"\" Convert a class to a marshmallow schema :param", "`base_schema` or `BaseSchema` \"\"\" # Remove `type: ignore` when mypy", "bool = None, **kwargs): all_loaded = super().load(data, many=many, **kwargs) many", "valid URL.']} >>> @dataclasses.dataclass ... class NeverValid: ... @marshmallow.validates_schema ...", "\"<NAME>\"}] ... }) >>> person Person(name='Anonymous', friends=[Person(name='<NAME>', friends=[])]) >>> @dataclasses.dataclass()", "None metadata = {\"description\": typ.__name__, **typ_args, **metadata} field = getattr(typ,", ">>> @dataclass(order=True) # preserve field order ... 
class Point: ...", "marshmallow_dataclass import dataclass import datetime @dataclass class User: birth: datetime.date", "metadata=metadata, base_schema=base_schema) elif typing_inspect.is_union_type(typ): from . import union_field return union_field.Union(", ">>> from marshmallow import Schema >>> @dataclass(order=True) # preserve field", "1024 # _cls should never be specified by keyword, so", "Custom marshmallow field }) Schema: ClassVar[Type[Schema]] = Schema # For", "import marshmallow_enum return marshmallow_enum.EnumField(typ, **metadata) # Nested marshmallow dataclass nested_schema", "dataclasses.field(default=\"<EMAIL>\") >>> ContactInfo.Schema().load({}) ContactInfo(mail='<EMAIL>') >>> ContactInfo.Schema().load({\"mail\": \"grumble grumble\"}) Traceback (most", "__all__ = [\"dataclass\", \"add_schema\", \"class_schema\", \"field_for_schema\", \"NewType\"] NoneType = type(None)", ">>> Point.Schema().load({'x':0, 'y':0}) # This line can be statically type", ">>> city City(name='Paris', best_building=Building(height=None, name='Eiffel Tower'), other_buildings=[]) >>> citySchema.load({\"name\":\"Paris\"}) Traceback", "import dataclasses import marshmallow import typing_inspect __all__ = [\"dataclass\", \"add_schema\",", "arguments if t is not NoneType) # type: ignore #", "used as a base class when deriving dataclass schema >>>", "url:str = dataclasses.field(metadata = { ... \"marshmallow_field\": marshmallow.fields.Url() # Custom", "dataclasses import marshmallow import typing_inspect __all__ = [\"dataclass\", \"add_schema\", \"class_schema\",", "City: ... name: str = dataclasses.field(metadata={'required':True}) ... best_building: Building #", "_cls is used to detect if this # decorator is", "... def validates(self, value): ... if len(value) > 5: raise", "= { ... 
\"marshmallow_field\": marshmallow.fields.Url() # Custom marshmallow field ...", "if isinstance(typ, EnumMeta): import marshmallow_enum return marshmallow_enum.EnumField(typ, **metadata) # Nested", "-> Type[marshmallow.Schema]: \"\"\" Base schema factory that creates a schema", "List['Person'] = dataclasses.field(default_factory=lambda:[]) # Recursive field ... >>> person =", "generated :param default: value to use for (de)serialization when the", "name='Eiffel Tower'), other_buildings=[]) >>> citySchema.load({\"name\":\"Paris\"}) Traceback (most recent call last):", "class when deriving dataclass schema >>> int_field = field_for_schema(int, default=9,", "# Reference to another dataclasses. A schema will be created", "default is not marshmallow.missing: metadata.setdefault(\"default\", default) # 'missing' must not", "new_validators: List[Callable] = [] for meta_dict in (typ_args, metadata): if", "a schema for `clazz` derived either from `base_schema` or `BaseSchema`", "str, field=marshmallow.fields.Email) >>> @dataclass ... class ContactInfo: ... mail: Email", "\"required\": True # A parameter to pass to marshmallow's field", "as the 'Schema' attribute in a dataclass. It uses :func:`class_schema`", "our custom NewType implementation typ_args = getattr(typ, \"_marshmallow_args\", {}) #", "= Point.Schema().dumps(point) Full example:: from marshmallow import Schema from dataclasses", "def validate(self, data, **_): ... raise marshmallow.ValidationError('never valid') ... >>>", "not match expected pattern.']}} >>> MyIps.Schema().load({\"ips\": [\"127.0.0.1\"]}) MyIps(ips=['127.0.0.1']) >>> Email", "import dataclass @dataclass class Point: x:float y:float point = Point(x=0,", "custom marshmallow field (one that has no equivalent python type),", "all_loaded = super().load(data, many=many, **kwargs) many = self.many if many", ">>> @dataclasses.dataclass() ... class Person: ... name: str = dataclasses.field(default=\"Anonymous\")", ":func:`class_schema` internally. 
:param type cls: The dataclass to which a", ".. note:: All the arguments supported by marshmallow field classes", "# 'missing' must not be set for required fields. if", "), ) for subtyp in arguments ], **metadata, ) #", "of generated schemas. Removes duplicates. MAX_CLASS_SCHEMA_CACHE_SIZE = 1024 # _cls", "Reference to another dataclasses. A schema will be created for", "origin in (dict, Dict): dict_type = type_mapping.get(Dict, marshmallow.fields.Dict) return dict_type(", "Point(x=0, y=0) point_json = Point.Schema().dumps(point) Full example:: from marshmallow import", "city City(name='Paris', best_building=Building(height=None, name='Eiffel Tower'), other_buildings=[]) >>> citySchema.load({\"name\":\"Paris\"}) Traceback (most", "marshmallow import typing_inspect __all__ = [\"dataclass\", \"add_schema\", \"class_schema\", \"field_for_schema\", \"NewType\"]", ">>> city_json = citySchema.dump(city) >>> city_json['best_building'] # We get an", "last): ... marshmallow.exceptions.ValidationError: {'url': ['Not a valid URL.']} >>> @dataclasses.dataclass", "cannot be turned into one.\" ) # Copy all marshmallow", "in (typ_args, metadata): if \"validate\" in meta_dict: if marshmallow.utils.is_iterable_but_not_string(meta_dict[\"validate\"]): new_validators.extend(meta_dict[\"validate\"])", "type. The metadata of the dataclass field is used as", "... class Meta: ... ordered = True ... >>> class_schema(Building)", "dataclasses.dataclass, but also applies :func:`add_schema`. It adds a `.Schema` attribute", "field }) Schema: ClassVar[Type[Schema]] = Schema # For the type", "order: bool = False, unsafe_hash: bool = False, frozen: bool", "NewType implementation typ_args = getattr(typ, \"_marshmallow_args\", {}) # Handle multiple", "marshmallow field ... }) ... 
>>> class_schema(Website)().load({\"url\": \"I am not", "{'_schema': ['never valid']} >>> # noinspection PyTypeChecker >>> class_schema(None) #", "this # decorator is being called with parameters or not.", "**metadata) elif origin in (dict, Dict): dict_type = type_mapping.get(Dict, marshmallow.fields.Dict)", ">>> person = class_schema(Person)().load({ ... \"friends\": [{\"name\": \"<NAME>\"}] ... })", "['Name too long']} \"\"\" return _proxied_class_schema(clazz, base_schema) @lru_cache(maxsize=MAX_CLASS_SCHEMA_CACHE_SIZE) def _proxied_class_schema(", "for field in fields if field.init ) schema_class = type(clazz.__name__,", "True) # If the field was already defined by the", "unsupported type Traceback (most recent call last): ... TypeError: None", ">>> int_field = field_for_schema(int, default=9, metadata=dict(required=True)) >>> int_field.__class__ <class 'marshmallow.fields.Integer'>", "is None else bool(many) if many: return [clazz(**loaded) for loaded", "For the type checker ... >>> Point.Schema().load({'x':0, 'y':0}) # This", "of generated schemas that class_schema keeps of generated schemas. Removes", "field was already defined by the user predefined_field = metadata.get(\"marshmallow_field\")", "that creates a schema for `clazz` derived either from `base_schema`", "derived either from `base_schema` or `BaseSchema` \"\"\" # Remove `type:", "List, Mapping, Optional, Set, Tuple, Type, TypeVar, Union, cast, overload,", "def class_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None ) ->", "required fields. if not metadata.get(\"required\"): metadata.setdefault(\"missing\", default) else: metadata.setdefault(\"required\", True)", "be transmitted to the marshmallow field constructor. >>> import marshmallow.validate", "to the class object :param base_schema: marshmallow schema used as", "... class NeverValid: ... @marshmallow.validates_schema ... def validate(self, data, **_):", "also applies :func:`add_schema`. 
It adds a `.Schema` attribute to the", ") if _cls is None: return lambda cls: add_schema(dc(cls), base_schema)", "will be transmitted to the marshmallow field constructor. >>> import", "Dict, List, Mapping, Optional, Set, Tuple, Type, TypeVar, Union, cast,", "as dataclasses.dataclass, but also applies :func:`add_schema`. It adds a `.Schema`", "marshmallow field }) Schema: ClassVar[Type[Schema]] = Schema # For the", "of dataclass members that will be copied to generated schema.", "This library allows the conversion of python 3.7's :mod:`dataclasses` to", "in arguments if t is not NoneType) # type: ignore", "long']} \"\"\" return _proxied_class_schema(clazz, base_schema) @lru_cache(maxsize=MAX_CLASS_SCHEMA_CACHE_SIZE) def _proxied_class_schema( clazz: type,", "Meters = typing.NewType('Meters', float) >>> @dataclasses.dataclass() ... class Building: ...", "# type: ignore _cls, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen )", "fields: Tuple[dataclasses.Field, ...] = dataclasses.fields(clazz) except TypeError: # Not a", "newtype_supertype, metadata=metadata, default=default, base_schema=base_schema, ) def field_for_schema( typ: type, default=marshmallow.missing,", "many=many, **kwargs) many = self.many if many is None else", "the field is missing :param metadata: Additional parameters to pass", "enumerations if isinstance(typ, EnumMeta): import marshmallow_enum return marshmallow_enum.EnumField(typ, **metadata) #", "that has no equivalent python type), you can pass it", "_cls should never be specified by keyword, so start it", "bool = False, base_schema: Optional[Type[marshmallow.Schema]] = None, ): \"\"\" This", "None, ) -> Callable[[Type[_U]], Type[_U]]: ... @overload def add_schema( _cls:", "too. ... other_buildings: List[Building] = dataclasses.field(default_factory=lambda: []) ... >>> citySchema", "Type[marshmallow.Schema] = None ) -> Type[_U]: ... 
def add_schema(_cls=None, base_schema=None):", "MEMBERS_WHITELIST: Set[str] = {\"Meta\"} # Max number of generated schemas", "None ) -> Type[marshmallow.Schema]: \"\"\" Base schema factory that creates", "Nested dataclasses forward_reference = getattr(typ, \"__forward_arg__\", None) nested = (", "or not. def dataclass( _cls: Type[_U] = None, *, repr:", "def add_schema(_cls: Type[_U]) -> Type[_U]: ... @overload def add_schema( base_schema:", "field.name, field_for_schema( field.type, _get_field_default(field), field.metadata, base_schema ), ) for field", "to use for (de)serialization when the field is missing :param", "Additional parameters to pass to the marshmallow field constructor :param", "class_schema(Person)().load({ ... \"friends\": [{\"name\": \"<NAME>\"}] ... }) >>> person Person(name='Anonymous',", "Update the schema members to contain marshmallow fields instead of", "Type[_U], base_schema: Type[marshmallow.Schema] = None ) -> Type[_U]: ... def", "@overload def add_schema(_cls: Type[_U]) -> Type[_U]: ... @overload def add_schema(", "'Schema' attribute in a dataclass. It uses :func:`class_schema` internally. :param", "None), ('name', 'Eiffel Tower')]) >>> @dataclasses.dataclass() ... class Person: ...", "be added :param base_schema: marshmallow schema used as a base", "metadata[\"validate\"] = new_validators if new_validators else None metadata = {\"description\":", "in meta_dict: if marshmallow.utils.is_iterable_but_not_string(meta_dict[\"validate\"]): new_validators.extend(meta_dict[\"validate\"]) elif callable(meta_dict[\"validate\"]): new_validators.append(meta_dict[\"validate\"]) metadata[\"validate\"] =", "of dataclass fields attributes.update( ( field.name, field_for_schema( field.type, _get_field_default(field), field.metadata,", "... 
marshmallow.exceptions.ValidationError: {'mail': ['Not a valid email address.']} \"\"\" def", "optional types as types with a None default metadata[\"default\"] =", "dataclass's typing doesn't expect it to be called as a", "# noinspection PyTypeChecker >>> class_schema(None) # unsupported type Traceback (most", ">>> artist Artist(names=('Martin', 'Ramirez')) \"\"\" def decorator(clazz: Type[_U]) -> Type[_U]:", "will NOT be imported ... }, unknown=marshmallow.EXCLUDE) >>> c C(important=9,", "... \"friends\": [{\"name\": \"<NAME>\"}] ... }) >>> person Person(name='Anonymous', friends=[Person(name='<NAME>',", "dataclass import datetime @dataclass class User: birth: datetime.date = field(metadata=", "a dataclass and cannot be turned into one.\" ) #", "... class ContactInfo: ... mail: Email = dataclasses.field(default=\"<EMAIL>\") >>> ContactInfo.Schema().load({})", "`clazz` derived either from `base_schema` or `BaseSchema` \"\"\" # Remove", "def field_for_schema( typ: type, default=marshmallow.missing, metadata: Mapping[str, Any] = None,", "for required field.']} >>> city_json = citySchema.dump(city) >>> city_json['best_building'] #", "field.default def NewType( name: str, typ: Type[_U], field: Optional[Type[marshmallow.fields.Field]] =", "# Override base_schema.TYPE_MAPPING to change the class used for generic", "marshmallow schema class (not an instance) <class 'marshmallow.schema.Building'> >>> @dataclasses.dataclass()", "base_schema),), attributes) return cast(Type[marshmallow.Schema], schema_class) def _field_by_type( typ: Union[type, Any],", "long\") >>> class_schema(Anything)().load({\"name\": \"aaaaaargh\"}) Traceback (most recent call last): ...", "9 # This field will NOT be imported ... },", "@dataclass(order=True) # preserve field order ... class Point: ... 
x:float", "names: Tuple[str, str] >>> artist = Artist.Schema().loads('{\"NAMES\": [\"Martin\", \"Ramirez\"]}') >>>", "import ClassVar >>> from marshmallow import Schema >>> @dataclass(order=True) #", "a dataclass default value >>> _get_field_default(dataclasses.field()) <marshmallow.missing> \"\"\" # Remove", "_cls else decorator def class_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] =", "was already defined by the user predefined_field = metadata.get(\"marshmallow_field\") if", "= name new_type.__supertype__ = typ # type: ignore new_type._marshmallow_field =", "fields based on a super field. (Usually spawned from NewType)", "(dict, Dict): dict_type = type_mapping.get(Dict, marshmallow.fields.Dict) return dict_type( keys=field_for_schema(arguments[0], base_schema=base_schema),", "new field for fields based on a super field. (Usually", "class Artist: ... name: str >>> Artist.Schema <class 'marshmallow.schema.Artist'> >>>", "# Returns a marshmallow schema class (not an instance) <class", "a function with a __supertype__ attribute newtype_supertype = getattr(typ, \"__supertype__\",", "{}) # Handle multiple validators from both `typ` and `metadata`.", "it to be called as a function, so ignore type", "enum import EnumMeta from functools import lru_cache from typing import", "None ) -> Type[marshmallow.Schema]: try: # noinspection PyDataclass fields: Tuple[dataclasses.Field,", "am not a good URL !\"}) Traceback (most recent call", "Dict[Any, Any] # Base types field = _field_by_type(typ, base_schema) if", "@dataclasses.dataclass ... class Anything: ... name: str ... @marshmallow.validates('name') ...", "if hasattr(v, \"__marshmallow_hook__\") or k in MEMBERS_WHITELIST } # Update", "duplicates. MAX_CLASS_SCHEMA_CACHE_SIZE = 1024 # _cls should never be specified", "# If the field was already defined by the user", "... >>> class_schema(Website)().load({\"url\": \"I am not a good URL !\"})", "... 
marshmallow.exceptions.ValidationError: {'_schema': ['never valid']} >>> # noinspection PyTypeChecker >>>", "schema >>> class BaseSchema(marshmallow.Schema): ... def on_bind_field(self, field_name, field_obj): ...", "['Not a valid email address.']} \"\"\" def new_type(x: _U): return", "to the marshmallow field constructor. >>> import marshmallow.validate >>> IPv4", "base_schema=base_schema) list_type = type_mapping.get(List, marshmallow.fields.List) return list_type(child_type, **metadata) if origin", "... >>> c = class_schema(C)().load({ ... \"important\": 9, # This", "no equivalent python type), you can pass it as the", "repr(clazz))} is not a dataclass and cannot be turned into", "to pass to the marshmallow field constructor :param base_schema: marshmallow", "if _cls is None: return lambda cls: add_schema(dc(cls), base_schema) return", "# Remove `type: ignore` when https://github.com/python/mypy/issues/6910 is fixed default_factory =", "{ k: v for k, v in inspect.getmembers(clazz) if hasattr(v,", "EnumMeta from functools import lru_cache from typing import ( Any,", "# See https://github.com/lovasoa/marshmallow_dataclass/issues/91 new_validators: List[Callable] = [] for meta_dict in", "data for required field.']} >>> city_json = citySchema.dump(city) >>> city_json['best_building']", "# type: ignore if default_factory is not dataclasses.MISSING: return default_factory", "marshmallow.ValidationError('never valid') ... >>> class_schema(NeverValid)().load({}) Traceback (most recent call last):", "noinspection PyTypeChecker >>> class_schema(None) # unsupported type Traceback (most recent", "Full example:: from marshmallow import Schema from dataclasses import field", "if predefined_field: return predefined_field # Generic types specified without type", "The type for which a field should be generated :param", "Meta class OrderedDict([('height', None), ('name', 'Eiffel Tower')]) >>> @dataclasses.dataclass() ...", "cannot be turned into one. 
>>> @dataclasses.dataclass ... class Anything:", "_get_field_default(field), field.metadata, base_schema ), ) for field in fields if", "import dataclass import datetime @dataclass class User: birth: datetime.date =", "TypeError: # Not a dataclass try: return class_schema(dataclasses.dataclass(clazz), base_schema) except", "not NoneType) # type: ignore # Treat optional types as", "class_schema(NeverValid)().load({}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'_schema': ['never", "python class (may be a dataclass) :param base_schema: marshmallow schema", "Schema corresponding to the dataclass .. note:: All the arguments", "Field corresponding to the given python type. The metadata of", "= field(metadata= { \"required\": True # A parameter to pass", "base_schema: Optional[Type[marshmallow.Schema]] = None, ): \"\"\" This decorator does the", "typ is list: typ = List[Any] elif typ is dict:", "dataclass fields attributes.update( ( field.name, field_for_schema( field.type, _get_field_default(field), field.metadata, base_schema", "transmitted to the marshmallow field constructor. >>> import marshmallow.validate >>>", "to :mod:`marshmallow` schemas. It takes a python class, and generates", "with a None default metadata[\"default\"] = metadata.get(\"default\", None) metadata[\"missing\"] =", "marshmallow import Schema from marshmallow_dataclass import dataclass @dataclass class Point:", "to the schema. attributes = { k: v for k,", "-> Type[_U]: ... 
@overload def add_schema( base_schema: Type[marshmallow.Schema] = None,", "from typing import ( Any, Callable, Dict, List, Mapping, Optional,", "dataclass default value >>> _get_field_default(dataclasses.field()) <marshmallow.missing> \"\"\" # Remove `type:", "example:: from marshmallow import Schema from dataclasses import field from", "**metadata, ) elif typing_inspect.is_optional_type(typ): subtyp = next(t for t in", "used as a base class when deriving dataclass schema :return:", "( subtyp, field_for_schema( subtyp, metadata=metadata, base_schema=base_schema ), ) for subtyp", "False, frozen: bool = False, base_schema: Optional[Type[marshmallow.Schema]] = None, ):", "not metadata.get(\"required\"): metadata.setdefault(\"missing\", default) else: metadata.setdefault(\"required\", True) # If the", "# This field will be imported ... \"unimportant\": 9 #", "as a base class when deriving dataclass schema >>> class", "in inspect.getmembers(clazz) if hasattr(v, \"__marshmallow_hook__\") or k in MEMBERS_WHITELIST }", "metadata.get(\"missing\", None) metadata[\"required\"] = False return field_for_schema(subtyp, metadata=metadata, base_schema=base_schema) elif", "{\"Meta\"} # Max number of generated schemas that class_schema keeps", "base_schema: Optional[Type[marshmallow.Schema]] = None, ) -> marshmallow.fields.Field: \"\"\" Get a", "Any], base_schema: Optional[Type[marshmallow.Schema]] ) -> Optional[Type[marshmallow.fields.Field]]: return ( base_schema and", "types specified without type arguments if typ is list: typ", "return ( base_schema and base_schema.TYPE_MAPPING.get(typ) ) or marshmallow.Schema.TYPE_MAPPING.get(typ) def _field_by_supertype(", "base_schema=None): \"\"\" This decorator adds a marshmallow schema as the", "_proxied_class_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None ) -> Type[marshmallow.Schema]:", "True, eq: bool = True, order: bool = False, unsafe_hash:", "_cls, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, 
frozen=frozen ) if _cls is", "base_schema=base_schema), **metadata, ) elif typing_inspect.is_optional_type(typ): subtyp = next(t for t", "if default_factory is not dataclasses.MISSING: return default_factory elif field.default is", "Meta: ... ordered = True ... >>> class_schema(Building) # Returns", "field.default_factory # type: ignore if default_factory is not dataclasses.MISSING: return", "typing_inspect.is_union_type(typ): from . import union_field return union_field.Union( [ ( subtyp,", "or `BaseSchema` \"\"\" # Remove `type: ignore` when mypy handles", "['String does not match expected pattern.']}} >>> MyIps.Schema().load({\"ips\": [\"127.0.0.1\"]}) MyIps(ips=['127.0.0.1'])", "from marshmallow import Schema >>> @dataclass(order=True) # preserve field order", ") for field in fields if field.init ) schema_class =", "None, ) -> marshmallow.fields.Field: \"\"\" Get a marshmallow Field corresponding", "False, base_schema: Optional[Type[marshmallow.Schema]] = None, ): \"\"\" This decorator does", "recent call last): ... marshmallow.exceptions.ValidationError: {'name': ['Name too long']} \"\"\"", "Base types field = _field_by_type(typ, base_schema) if field: return field(**metadata)", "super().load(data, many=many, **kwargs) many = self.many if many is None", "[ ( subtyp, field_for_schema( subtyp, metadata=metadata, base_schema=base_schema ), ) for", "dc = dataclasses.dataclass( # type: ignore _cls, repr=repr, eq=eq, order=order,", "generated schemas. Removes duplicates. MAX_CLASS_SCHEMA_CACHE_SIZE = 1024 # _cls should", "field from marshmallow_dataclass import dataclass import datetime @dataclass class User:", "dataclass .. note:: All the arguments supported by marshmallow field", "= {\"Meta\"} # Max number of generated schemas that class_schema", "field classes can be passed in the `metadata` dictionary of", "being called with parameters or not. def dataclass( _cls: Type[_U]", "# Handle multiple validators from both `typ` and `metadata`. 
#", "Tower\"}}) >>> city City(name='Paris', best_building=Building(height=None, name='Eiffel Tower'), other_buildings=[]) >>> citySchema.load({\"name\":\"Paris\"})", "field_for_schema( typ: type, default=marshmallow.missing, metadata: Mapping[str, Any] = None, base_schema:", "y=0.0) \"\"\" # dataclass's typing doesn't expect it to be", "python type. The metadata of the dataclass field is used", "unimportant: int = dataclasses.field(init=False, default=0) ... >>> c = class_schema(C)().load({", "value given a dataclass default value >>> _get_field_default(dataclasses.field()) <marshmallow.missing> \"\"\"", "= None, *, repr: bool = True, eq: bool =", "schema :return: A marshmallow Schema corresponding to the dataclass ..", "is not marshmallow.missing: metadata.setdefault(\"default\", default) # 'missing' must not be", ":param base_schema: marshmallow schema used as a base class when", "used for generic types below type_mapping = base_schema.TYPE_MAPPING if base_schema", "typ: Type[_U], field: Optional[Type[marshmallow.fields.Field]] = None, **kwargs, ) -> Callable[[_U],", "<class 'marshmallow.schema.Building'> >>> @dataclasses.dataclass() ... class City: ... name: str", "be turned into one.\" ) # Copy all marshmallow hooks", "if origin in (tuple, Tuple): children = tuple( field_for_schema(arg, base_schema=base_schema)", "... marshmallow.exceptions.ValidationError: {'name': ['Name too long']} \"\"\" return _proxied_class_schema(clazz, base_schema)", "passed to this function will be transmitted to the marshmallow", "return BaseSchema def _get_field_default(field: dataclasses.Field): \"\"\" Return a marshmallow default", "typing doesn't expect it to be called as a function,", "int = dataclasses.field(init=False, default=0) ... >>> c = class_schema(C)().load({ ...", "friends: List['Person'] = dataclasses.field(default_factory=lambda:[]) # Recursive field ... 
>>> person", "-> Type[_U]: clazz.Schema = class_schema(clazz, base_schema) # type: ignore return", "handles dynamic base classes # https://github.com/python/mypy/issues/2813 class BaseSchema(base_schema or marshmallow.Schema):", "Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'best_building': ['Missing data", "# type: ignore return clazz return decorator(_cls) if _cls else", "Remove `type: ignore` when mypy handles dynamic base classes #", "match expected pattern.']}} >>> MyIps.Schema().load({\"ips\": [\"127.0.0.1\"]}) MyIps(ips=['127.0.0.1']) >>> Email =", "called with parameters or not. def dataclass( _cls: Type[_U] =", "not marshmallow.missing: metadata.setdefault(\"default\", default) # 'missing' must not be set", ":func:`add_schema`. It adds a `.Schema` attribute to the class object", "MyIps: ... ips: List[IPv4] >>> MyIps.Schema().load({\"ips\": [\"0.0.0.0\", \"grumble grumble\"]}) Traceback", "... name: str = dataclasses.field(default=\"Anonymous\") ... friends: List['Person'] = dataclasses.field(default_factory=lambda:[])", "or marshmallow.Schema): # type: ignore def load(self, data: Mapping, *,", "marshmallow.fields.Field: \"\"\" Return a new field for fields based on", "default=0) ... # Only fields that are in the __init__", "['Missing data for required field.']} >>> city_json = citySchema.dump(city) >>>", "= getattr(typ, \"__forward_arg__\", None) nested = ( nested_schema or forward_reference", "isinstance(typ, EnumMeta): import marshmallow_enum return marshmallow_enum.EnumField(typ, **metadata) # Nested marshmallow", "type cls: The dataclass to which a Schema should be", "ignore def load(self, data: Mapping, *, many: bool = None,", "... 
if len(value) > 5: raise marshmallow.ValidationError(\"Name too long\") >>>", "if field: return field(**metadata) else: return field_for_schema( newtype_supertype, metadata=metadata, default=default,", "subtyp, metadata=metadata, base_schema=base_schema ), ) for subtyp in arguments ],", "Type[_U] = None, *, repr: bool = True, eq: bool", "creates a schema for `clazz` derived either from `base_schema` or", ">>> @dataclasses.dataclass ... class NeverValid: ... @marshmallow.validates_schema ... def validate(self,", "= self.many if many is None else bool(many) if many:", "schema. MEMBERS_WHITELIST: Set[str] = {\"Meta\"} # Max number of generated", "... >>> class_schema(Building) # Returns a marshmallow schema class (not", "['never valid']} >>> # noinspection PyTypeChecker >>> class_schema(None) # unsupported", "the keyword arguments passed to this function will be transmitted", "bool(many) if many: return [clazz(**loaded) for loaded in all_loaded] else:", "float) >>> @dataclasses.dataclass() ... class Building: ... height: Optional[Meters] ...", "class_schema(None) # unsupported type Traceback (most recent call last): ...", "the conversion of python 3.7's :mod:`dataclasses` to :mod:`marshmallow` schemas. It", "# dataclass's typing doesn't expect it to be called as", "k in MEMBERS_WHITELIST } # Update the schema members to", "\"\"\" Base schema factory that creates a schema for `clazz`", "library allows the conversion of python 3.7's :mod:`dataclasses` to :mod:`marshmallow`", "expect it to be called as a function, so ignore", "the `metadata` dictionary of a field. If you want to", ") # Copy all marshmallow hooks and whitelisted members of", "metadata=metadata, base_schema=base_schema ), ) for subtyp in arguments ], **metadata,", "base_schema=base_schema, ) def field_for_schema( typ: type, default=marshmallow.missing, metadata: Mapping[str, Any]", "class Meta: ... ordered = True ... 
>>> class_schema(Building) #", "field_for_schema(subtyp, metadata=metadata, base_schema=base_schema) elif typing_inspect.is_union_type(typ): from . import union_field return", "NewType('IPv4', str, validate=marshmallow.validate.Regexp(r'^([0-9]{1,3}\\\\.){3}[0-9]{1,3}$')) >>> @dataclass ... class MyIps: ... ips:", "... def validate(self, data, **_): ... raise marshmallow.ValidationError('never valid') ...", "marshmallow.exceptions.ValidationError: {'url': ['Not a valid URL.']} >>> @dataclasses.dataclass ... class", "key in the metadata dictionary. >>> import typing >>> Meters", "field }) website:str = field(metadata = { \"marshmallow_field\": marshmallow.fields.Url() #", "add_schema(dc, base_schema) @overload def add_schema(_cls: Type[_U]) -> Type[_U]: ... @overload", "**kwargs): all_loaded = super().load(data, many=many, **kwargs) many = self.many if", "... friends: List['Person'] = dataclasses.field(default_factory=lambda:[]) # Recursive field ... >>>", "https://github.com/python/mypy/issues/6910 is fixed default_factory = field.default_factory # type: ignore if", "# Generic types origin = typing_inspect.get_origin(typ) if origin: arguments =", "... >>> person = class_schema(Person)().load({ ... \"friends\": [{\"name\": \"<NAME>\"}] ...", "base_schema else {} if origin in (list, List): child_type =", "does the same as dataclasses.dataclass, but also applies :func:`add_schema`. It", "**metadata) if origin in (tuple, Tuple): children = tuple( field_for_schema(arg,", ") -> Type[marshmallow.Schema]: \"\"\" Convert a class to a marshmallow", "is Any: metadata.setdefault(\"allow_none\", True) return marshmallow.fields.Raw(**metadata) # Generic types origin", "... class City: ... name: str = dataclasses.field(metadata={'required':True}) ... best_building:", "checker ... 
>>> Point.Schema().load({'x':0, 'y':0}) # This line can be", "return _field_by_supertype( typ=typ, default=default, newtype_supertype=newtype_supertype, metadata=metadata, base_schema=base_schema, ) # enumerations", "MyIps.Schema().load({\"ips\": [\"127.0.0.1\"]}) MyIps(ips=['127.0.0.1']) >>> Email = NewType('Email', str, field=marshmallow.fields.Email) >>>", "decorator does the same as dataclasses.dataclass, but also applies :func:`add_schema`.", "Person: ... name: str = dataclasses.field(default=\"Anonymous\") ... friends: List['Person'] =", ") -> marshmallow.fields.Field: \"\"\" Get a marshmallow Field corresponding to", "= metadata.get(\"missing\", None) metadata[\"required\"] = False return field_for_schema(subtyp, metadata=metadata, base_schema=base_schema)", "a base class when deriving dataclass schema :return: A marshmallow", "friends=[Person(name='<NAME>', friends=[])]) >>> @dataclasses.dataclass() ... class C: ... important: int", "def _get_field_default(field: dataclasses.Field): \"\"\" Return a marshmallow default value given", "be passed in the `metadata` dictionary of a field. If", "last): ... TypeError: None is not a dataclass and cannot", "without type arguments if typ is list: typ = List[Any]", "inspect.getmembers(clazz) if hasattr(v, \"__marshmallow_hook__\") or k in MEMBERS_WHITELIST } #", "Optional[Type[marshmallow.Schema]] = None ) -> Type[marshmallow.Schema]: try: # noinspection PyDataclass", "def add_schema(_cls=None, base_schema=None): \"\"\" This decorator adds a marshmallow schema", "takes a python class, and generates a marshmallow schema for", "def _field_by_type( typ: Union[type, Any], base_schema: Optional[Type[marshmallow.Schema]] ) -> Optional[Type[marshmallow.fields.Field]]:", "... @dataclasses.dataclass ... class Artist: ... 
names: Tuple[str, str] >>>", "{} if origin in (list, List): child_type = field_for_schema(arguments[0], base_schema=base_schema)", "return field_for_schema( newtype_supertype, metadata=metadata, default=default, base_schema=base_schema, ) def field_for_schema( typ:", "a python class, and generates a marshmallow schema for it.", "dataclasses.field(metadata={'required':True}) ... best_building: Building # Reference to another dataclasses. A", "= getattr(typ, \"_marshmallow_field\", None) if field: return field(**metadata) else: return", "{ \"required\": True # A parameter to pass to marshmallow's", "a __supertype__ attribute newtype_supertype = getattr(typ, \"__supertype__\", None) if newtype_supertype", "typ: Union[type, Any], base_schema: Optional[Type[marshmallow.Schema]] ) -> Optional[Type[marshmallow.fields.Field]]: return (", "members that will be copied to generated schema. MEMBERS_WHITELIST: Set[str]", "return marshmallow.missing return field.default def NewType( name: str, typ: Type[_U],", "keeps of generated schemas. Removes duplicates. MAX_CLASS_SCHEMA_CACHE_SIZE = 1024 #", "into one. >>> @dataclasses.dataclass ... class Anything: ... name: str", "the field was already defined by the user predefined_field =", "None else bool(many) if many: return [clazz(**loaded) for loaded in", "_field_by_supertype( typ: Type, default: marshmallow.missing, newtype_supertype: Type, metadata: dict, base_schema:", "NewType('Email', str, field=marshmallow.fields.Email) >>> @dataclass ... class ContactInfo: ... mail:", ") -> Type[_U]: ... def add_schema(_cls=None, base_schema=None): \"\"\" This decorator", "IPv4 = NewType('IPv4', str, validate=marshmallow.validate.Regexp(r'^([0-9]{1,3}\\\\.){3}[0-9]{1,3}$')) >>> @dataclass ... class MyIps:", "when mypy handles dynamic base classes # https://github.com/python/mypy/issues/2813 class BaseSchema(base_schema", "corresponding to the dataclass .. 
note:: All the arguments supported", "*, repr: bool = True, eq: bool = True, order:", "never be specified by keyword, so start it with an", "default: value to use for (de)serialization when the field is", "recent call last): ... marshmallow.exceptions.ValidationError: {'best_building': ['Missing data for required", "dict, base_schema: Optional[Type[marshmallow.Schema]], ) -> marshmallow.fields.Field: \"\"\" Return a new", "marshmallow.fields.Nested(nested, **metadata) def _base_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None", "{ ... \"marshmallow_field\": marshmallow.fields.Url() # Custom marshmallow field ... })", "This line can be statically type checked Point(x=0.0, y=0.0) \"\"\"", "= kwargs # type: ignore return new_type if __name__ ==", "... best_building: Building # Reference to another dataclasses. A schema", "Artist: ... names: Tuple[str, str] >>> artist = Artist.Schema().loads('{\"NAMES\": [\"Martin\",", "marshmallow.fields.Field: \"\"\" Get a marshmallow Field corresponding to the given", "(Usually spawned from NewType) \"\"\" # Add the information coming", "same as dataclasses.dataclass, but also applies :func:`add_schema`. It adds a", "_cls: Type[_U] = None, *, repr: bool = True, eq:", "or class_schema(typ, base_schema=base_schema) ) return marshmallow.fields.Nested(nested, **metadata) def _base_schema( clazz:", "( Any, Callable, Dict, List, Mapping, Optional, Set, Tuple, Type,", "\"\"\" Get a marshmallow Field corresponding to the given python", "... class MyIps: ... ips: List[IPv4] >>> MyIps.Schema().load({\"ips\": [\"0.0.0.0\", \"grumble", "field_obj): ... field_obj.data_key = (field_obj.data_key or field_name).upper() >>> @add_schema(base_schema=BaseSchema) ...", "Schema from marshmallow_dataclass import dataclass @dataclass class Point: x:float y:float", "not a dataclass and cannot be turned into one. >>>", "(most recent call last): ... 
TypeError: None is not a", "f\"{getattr(clazz, '__name__', repr(clazz))} is not a dataclass and cannot be", "marshmallow_enum.EnumField(typ, **metadata) # Nested marshmallow dataclass nested_schema = getattr(typ, \"Schema\",", "= dataclasses.field(default=\"Anonymous\") ... friends: List['Person'] = dataclasses.field(default_factory=lambda:[]) # Recursive field", "This decorator adds a marshmallow schema as the 'Schema' attribute", "... \"unimportant\": 9 # This field will NOT be imported", "corresponding to the given python type. The metadata of the", "new_type._marshmallow_args = kwargs # type: ignore return new_type if __name__", "import lru_cache from typing import ( Any, Callable, Dict, List,", "and cannot be turned into one. >>> @dataclasses.dataclass ... class", "attributes = { k: v for k, v in inspect.getmembers(clazz)", "field.metadata, base_schema ), ) for field in fields if field.init", "in (dict, Dict): dict_type = type_mapping.get(Dict, marshmallow.fields.Dict) return dict_type( keys=field_for_schema(arguments[0],", "def validates(self, value): ... if len(value) > 5: raise marshmallow.ValidationError(\"Name", "typ # type: ignore new_type._marshmallow_field = field # type: ignore", "metadata[\"missing\"] = metadata.get(\"missing\", None) metadata[\"required\"] = False return field_for_schema(subtyp, metadata=metadata,", "if marshmallow.utils.is_iterable_but_not_string(meta_dict[\"validate\"]): new_validators.extend(meta_dict[\"validate\"]) elif callable(meta_dict[\"validate\"]): new_validators.append(meta_dict[\"validate\"]) metadata[\"validate\"] = new_validators if", "default) # 'missing' must not be set for required fields.", "is used as arguments to the marshmallow Field. :param typ:", "Building: ... height: Optional[Meters] ... 
name: str = dataclasses.field(default=\"anonymous\") ...", "is dict: typ = Dict[Any, Any] # Base types field", "> 5: raise marshmallow.ValidationError(\"Name too long\") >>> class_schema(Anything)().load({\"name\": \"aaaaaargh\"}) Traceback", "given a dataclass default value >>> _get_field_default(dataclasses.field()) <marshmallow.missing> \"\"\" #", "# For the type checker ... >>> Point.Schema().load({'x':0, 'y':0}) #", "is not a dataclass and cannot be turned into one.\"", "for arg in arguments ) tuple_type = type_mapping.get(Tuple, marshmallow.fields.Tuple) return", "class City: ... name: str = dataclasses.field(metadata={'required':True}) ... best_building: Building", "... class Building: ... height: Optional[Meters] ... name: str =", "MAX_CLASS_SCHEMA_CACHE_SIZE = 1024 # _cls should never be specified by", "field=marshmallow.fields.Email) >>> @dataclass ... class ContactInfo: ... mail: Email =", "important: int = dataclasses.field(init=True, default=0) ... # Only fields that", "with parameters or not. def dataclass( _cls: Type[_U] = None,", "@dataclasses.dataclass ... class NeverValid: ... @marshmallow.validates_schema ... def validate(self, data,", "@overload def add_schema( _cls: Type[_U], base_schema: Type[marshmallow.Schema] = None )", "None is not a dataclass and cannot be turned into", "Not a dataclass try: return class_schema(dataclasses.dataclass(clazz), base_schema) except Exception: raise", "or forward_reference or class_schema(typ, base_schema=base_schema) ) return marshmallow.fields.Nested(nested, **metadata) def", "return x new_type.__name__ = name new_type.__supertype__ = typ # type:", "TypeError( f\"{getattr(clazz, '__name__', repr(clazz))} is not a dataclass and cannot", "functools import lru_cache from typing import ( Any, Callable, Dict,", "str = dataclasses.field(default=\"Anonymous\") ... 
friends: List['Person'] = dataclasses.field(default_factory=lambda:[]) # Recursive", "Type[marshmallow.Schema]: try: # noinspection PyDataclass fields: Tuple[dataclasses.Field, ...] = dataclasses.fields(clazz)", "import Schema from dataclasses import field from marshmallow_dataclass import dataclass", "None, ): \"\"\" This decorator does the same as dataclasses.dataclass,", "metadata=metadata, default=default, base_schema=base_schema, ) def field_for_schema( typ: type, default=marshmallow.missing, metadata:", "on a super field. (Usually spawned from NewType) \"\"\" #", "... class Artist: ... name: str >>> Artist.Schema <class 'marshmallow.schema.Artist'>", "all_loaded] else: return clazz(**all_loaded) return BaseSchema def _get_field_default(field: dataclasses.Field): \"\"\"", "turned into one. >>> @dataclasses.dataclass ... class Anything: ... name:", "in arguments ) tuple_type = type_mapping.get(Tuple, marshmallow.fields.Tuple) return tuple_type(children, **metadata)", "v in inspect.getmembers(clazz) if hasattr(v, \"__marshmallow_hook__\") or k in MEMBERS_WHITELIST", "or field_name).upper() >>> @add_schema(base_schema=BaseSchema) ... @dataclasses.dataclass ... class Artist: ...", "be imported ... \"unimportant\": 9 # This field will NOT", "it. Simple example:: from marshmallow import Schema from marshmallow_dataclass import", "to detect if this # decorator is being called with", "**kwargs, ) -> Callable[[_U], _U]: \"\"\"NewType creates simple unique types", "keyword, so start it with an # underscore. The presence", "type Traceback (most recent call last): ... 
TypeError: None is", ") for subtyp in arguments ], **metadata, ) # typing.NewType", "be statically type checked Point(x=0.0, y=0.0) \"\"\" # dataclass's typing", "the type checker \"\"\" import inspect from enum import EnumMeta", "field = _field_by_type(typ, base_schema) if field: return field(**metadata) if typ", "called as a function, so ignore type check dc =", "valid']} >>> # noinspection PyTypeChecker >>> class_schema(None) # unsupported type", "= NewType('IPv4', str, validate=marshmallow.validate.Regexp(r'^([0-9]{1,3}\\\\.){3}[0-9]{1,3}$')) >>> @dataclass ... class MyIps: ...", "class Person: ... name: str = dataclasses.field(default=\"Anonymous\") ... friends: List['Person']", "-> Type[_U]: ... def add_schema(_cls=None, base_schema=None): \"\"\" This decorator adds", "contain marshmallow fields instead of dataclass fields attributes.update( ( field.name,", "int_field.default 9 >>> field_for_schema(str, metadata={\"marshmallow_field\": marshmallow.fields.Url()}).__class__ <class 'marshmallow.fields.Url'> \"\"\" metadata", "Email = dataclasses.field(default=\"<EMAIL>\") >>> ContactInfo.Schema().load({}) ContactInfo(mail='<EMAIL>') >>> ContactInfo.Schema().load({\"mail\": \"grumble grumble\"})", "coming our custom NewType implementation typ_args = getattr(typ, \"_marshmallow_args\", {})", "arguments ) tuple_type = type_mapping.get(Tuple, marshmallow.fields.Tuple) return tuple_type(children, **metadata) elif", "@marshmallow.validates_schema ... def validate(self, data, **_): ... 
raise marshmallow.ValidationError('never valid')", "# Remove `type: ignore` when mypy handles dynamic base classes", "return field(**metadata) if typ is Any: metadata.setdefault(\"allow_none\", True) return marshmallow.fields.Raw(**metadata)", "dataclass to which a Schema should be added :param base_schema:", "many: return [clazz(**loaded) for loaded in all_loaded] else: return clazz(**all_loaded)", "None ) -> Type[marshmallow.Schema]: \"\"\" Convert a class to a", "True) # Override base_schema.TYPE_MAPPING to change the class used for", "def _proxied_class_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None ) ->", "metadata: Additional parameters to pass to the marshmallow field constructor", "origin in (tuple, Tuple): children = tuple( field_for_schema(arg, base_schema=base_schema) for", "meta_dict in (typ_args, metadata): if \"validate\" in meta_dict: if marshmallow.utils.is_iterable_but_not_string(meta_dict[\"validate\"]):", "person Person(name='Anonymous', friends=[Person(name='<NAME>', friends=[])]) >>> @dataclasses.dataclass() ... class C: ...", "Artist.Schema().loads('{\"NAMES\": [\"Martin\", \"Ramirez\"]}') >>> artist Artist(names=('Martin', 'Ramirez')) \"\"\" def decorator(clazz:", "from both `typ` and `metadata`. # See https://github.com/lovasoa/marshmallow_dataclass/issues/91 new_validators: List[Callable]", "... url:str = dataclasses.field(metadata = { ... \"marshmallow_field\": marshmallow.fields.Url() #", "of _cls is used to detect if this # decorator", "be copied to generated schema. MEMBERS_WHITELIST: Set[str] = {\"Meta\"} #", "class_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]] = None ) -> Type[marshmallow.Schema]:", "imported ... }, unknown=marshmallow.EXCLUDE) >>> c C(important=9, unimportant=0) >>> @dataclasses.dataclass", "a base class when deriving dataclass schema >>> @dataclass ...", "Schema # For the type checker ... 
>>> Point.Schema().load({'x':0, 'y':0})", "base_schema) return add_schema(dc, base_schema) @overload def add_schema(_cls: Type[_U]) -> Type[_U]:", "<class 'marshmallow.schema.Artist'> >>> from typing import ClassVar >>> from marshmallow", "be a dataclass) :param base_schema: marshmallow schema used as a", "\"\"\" # Add the information coming our custom NewType implementation", "for fields based on a super field. (Usually spawned from", "eq: bool = True, order: bool = False, unsafe_hash: bool", "nested = ( nested_schema or forward_reference or class_schema(typ, base_schema=base_schema) )", "from enum import EnumMeta from functools import lru_cache from typing", "The dataclass to which a Schema should be added :param", "... Schema: ClassVar[Type[Schema]] = Schema # For the type checker", "\"_marshmallow_args\", {}) # Handle multiple validators from both `typ` and", "metadata.setdefault(\"default\", default) # 'missing' must not be set for required", "# For the type checker \"\"\" import inspect from enum", "PyDataclass fields: Tuple[dataclasses.Field, ...] = dataclasses.fields(clazz) except TypeError: # Not", "tuple( field_for_schema(arg, base_schema=base_schema) for arg in arguments ) tuple_type =", "required field.']} >>> city_json = citySchema.dump(city) >>> city_json['best_building'] # We", "return predefined_field # Generic types specified without type arguments if", "Type[_U]: ... @overload def add_schema( base_schema: Type[marshmallow.Schema] = None, )", "['Not a valid URL.']} >>> @dataclasses.dataclass ... class NeverValid: ...", "Type[marshmallow.Schema]: \"\"\" Base schema factory that creates a schema for", "as the ``marshmallow_field`` key in the metadata dictionary. >>> import", ") or marshmallow.Schema.TYPE_MAPPING.get(typ) def _field_by_supertype( typ: Type, default: marshmallow.missing, newtype_supertype:", "attribute in a dataclass. It uses :func:`class_schema` internally. :param type", "when deriving dataclass schema >>> @dataclass ... 
class Artist: ...", "new_type._marshmallow_field = field # type: ignore new_type._marshmallow_args = kwargs #", "\"__forward_arg__\", None) nested = ( nested_schema or forward_reference or class_schema(typ,", "if origin in (list, List): child_type = field_for_schema(arguments[0], base_schema=base_schema) list_type", "can attach custom marshmallow attributes. All the keyword arguments passed", "type: ignore return clazz return decorator(_cls) if _cls else decorator", "that will be copied to generated schema. MEMBERS_WHITELIST: Set[str] =", "either from `base_schema` or `BaseSchema` \"\"\" # Remove `type: ignore`", "*, many: bool = None, **kwargs): all_loaded = super().load(data, many=many,", "marshmallow.fields.Url()}).__class__ <class 'marshmallow.fields.Url'> \"\"\" metadata = {} if metadata is", "be set for required fields. if not metadata.get(\"required\"): metadata.setdefault(\"missing\", default)", "unimportant=0) >>> @dataclasses.dataclass ... class Website: ... url:str = dataclasses.field(metadata", "... marshmallow.exceptions.ValidationError: {'url': ['Not a valid URL.']} >>> @dataclasses.dataclass ...", "typing_inspect __all__ = [\"dataclass\", \"add_schema\", \"class_schema\", \"field_for_schema\", \"NewType\"] NoneType =", "the marshmallow Field. :param typ: The type for which a", "3.7's :mod:`dataclasses` to :mod:`marshmallow` schemas. It takes a python class,", "typing_inspect.get_origin(typ) if origin: arguments = typing_inspect.get_args(typ, True) # Override base_schema.TYPE_MAPPING", "field_for_schema(arguments[0], base_schema=base_schema) list_type = type_mapping.get(List, marshmallow.fields.List) return list_type(child_type, **metadata) if", "str >>> Artist.Schema <class 'marshmallow.schema.Artist'> >>> from typing import ClassVar", "schemas. Removes duplicates. MAX_CLASS_SCHEMA_CACHE_SIZE = 1024 # _cls should never", "= Dict[Any, Any] # Base types field = _field_by_type(typ, base_schema)", "class, and generates a marshmallow schema for it. 
Simple example::", "marshmallow.Schema): # type: ignore def load(self, data: Mapping, *, many:", "field will be imported ... \"unimportant\": 9 # This field", "base_schema: Optional[Type[marshmallow.Schema]], ) -> marshmallow.fields.Field: \"\"\" Return a new field", "`typ` and `metadata`. # See https://github.com/lovasoa/marshmallow_dataclass/issues/91 new_validators: List[Callable] = []", "v for k, v in inspect.getmembers(clazz) if hasattr(v, \"__marshmallow_hook__\") or", "# Copy all marshmallow hooks and whitelisted members of the", "types as types with a None default metadata[\"default\"] = metadata.get(\"default\",", "base class when deriving dataclass schema >>> @dataclass ... class", "deriving dataclass schema :return: A marshmallow Schema corresponding to the", "type_mapping.get(Dict, marshmallow.fields.Dict) return dict_type( keys=field_for_schema(arguments[0], base_schema=base_schema), values=field_for_schema(arguments[1], base_schema=base_schema), **metadata, )", "# https://github.com/python/mypy/issues/2813 class BaseSchema(base_schema or marshmallow.Schema): # type: ignore def", ") def field_for_schema( typ: type, default=marshmallow.missing, metadata: Mapping[str, Any] =", "datetime.date = field(metadata= { \"required\": True # A parameter to", "(most recent call last): ... marshmallow.exceptions.ValidationError: {'ips': {1: ['String does", "is fixed default_factory = field.default_factory # type: ignore if default_factory", "metadata=dict(required=True)) >>> int_field.__class__ <class 'marshmallow.fields.Integer'> >>> int_field.default 9 >>> field_for_schema(str,", "= field # type: ignore new_type._marshmallow_args = kwargs # type:", "based on a super field. (Usually spawned from NewType) \"\"\"", "be generated :param default: value to use for (de)serialization when", "else None metadata = {\"description\": typ.__name__, **typ_args, **metadata} field =", "-> Callable[[Type[_U]], Type[_U]]: ... 
@overload def add_schema( _cls: Type[_U], base_schema:", "allows the conversion of python 3.7's :mod:`dataclasses` to :mod:`marshmallow` schemas.", "\"friends\": [{\"name\": \"<NAME>\"}] ... }) >>> person Person(name='Anonymous', friends=[Person(name='<NAME>', friends=[])])", "base_schema: Optional[Type[marshmallow.Schema]] ) -> Optional[Type[marshmallow.fields.Field]]: return ( base_schema and base_schema.TYPE_MAPPING.get(typ)", "https://github.com/lovasoa/marshmallow_dataclass/issues/91 new_validators: List[Callable] = [] for meta_dict in (typ_args, metadata):", "types origin = typing_inspect.get_origin(typ) if origin: arguments = typing_inspect.get_args(typ, True)", "for required fields. if not metadata.get(\"required\"): metadata.setdefault(\"missing\", default) else: metadata.setdefault(\"required\",", "= False, frozen: bool = False, base_schema: Optional[Type[marshmallow.Schema]] = None,", "A python class (may be a dataclass) :param base_schema: marshmallow", "marshmallow.missing return field.default def NewType( name: str, typ: Type[_U], field:", "Optional[Meters] ... name: str = dataclasses.field(default=\"anonymous\") ... class Meta: ...", "dataclass. It uses :func:`class_schema` internally. :param type cls: The dataclass", "Tuple[str, str] >>> artist = Artist.Schema().loads('{\"NAMES\": [\"Martin\", \"Ramirez\"]}') >>> artist", "__init__ method will be added: ... unimportant: int = dataclasses.field(init=False,", "str ... @marshmallow.validates('name') ... def validates(self, value): ... 
if len(value)", "Point(x=0.0, y=0.0) \"\"\" # dataclass's typing doesn't expect it to", "# Custom marshmallow field }) Schema: ClassVar[Type[Schema]] = Schema #", "the class object :param base_schema: marshmallow schema used as a", "defined by the user predefined_field = metadata.get(\"marshmallow_field\") if predefined_field: return", "from marshmallow_dataclass import dataclass import datetime @dataclass class User: birth:", "to which a Schema should be added :param base_schema: marshmallow", "should be added :param base_schema: marshmallow schema used as a", ">>> class_schema(Website)().load({\"url\": \"I am not a good URL !\"}) Traceback", "has no equivalent python type), you can pass it as", "True # A parameter to pass to marshmallow's field })", "return decorator(_cls) if _cls else decorator def class_schema( clazz: type,", "BaseSchema def _get_field_default(field: dataclasses.Field): \"\"\" Return a marshmallow default value", "class (not an instance) <class 'marshmallow.schema.Building'> >>> @dataclasses.dataclass() ... class", "= {} if metadata is None else dict(metadata) if default", "to use a custom marshmallow field (one that has no", "... raise marshmallow.ValidationError('never valid') ... >>> class_schema(NeverValid)().load({}) Traceback (most recent", "is not a dataclass and cannot be turned into one.", "mail: Email = dataclasses.field(default=\"<EMAIL>\") >>> ContactInfo.Schema().load({}) ContactInfo(mail='<EMAIL>') >>> ContactInfo.Schema().load({\"mail\": \"grumble", "field_name, field_obj): ... 
field_obj.data_key = (field_obj.data_key or field_name).upper() >>> @add_schema(base_schema=BaseSchema)", "class User: birth: datetime.date = field(metadata= { \"required\": True #", "marshmallow import Schema from dataclasses import field from marshmallow_dataclass import", "decorator adds a marshmallow schema as the 'Schema' attribute in", "= dataclasses.field(default=\"<EMAIL>\") >>> ContactInfo.Schema().load({}) ContactInfo(mail='<EMAIL>') >>> ContactInfo.Schema().load({\"mail\": \"grumble grumble\"}) Traceback", "= typing.NewType('Meters', float) >>> @dataclasses.dataclass() ... class Building: ... height:", ") schema_class = type(clazz.__name__, (_base_schema(clazz, base_schema),), attributes) return cast(Type[marshmallow.Schema], schema_class)", "the metadata dictionary. >>> import typing >>> Meters = typing.NewType('Meters',", "else: return field_for_schema( newtype_supertype, metadata=metadata, default=default, base_schema=base_schema, ) def field_for_schema(", "which you can attach custom marshmallow attributes. All the keyword", "# preserve field order ... class Point: ... x:float ...", "class_schema(C)().load({ ... \"important\": 9, # This field will be imported", ">>> import marshmallow.validate >>> IPv4 = NewType('IPv4', str, validate=marshmallow.validate.Regexp(r'^([0-9]{1,3}\\\\.){3}[0-9]{1,3}$')) >>>", "\"\"\" metadata = {} if metadata is None else dict(metadata)", "call last): ... marshmallow.exceptions.ValidationError: {'url': ['Not a valid URL.']} >>>", "ContactInfo: ... mail: Email = dataclasses.field(default=\"<EMAIL>\") >>> ContactInfo.Schema().load({}) ContactInfo(mail='<EMAIL>') >>>", "generates a marshmallow schema for it. 
Simple example:: from marshmallow", "for `clazz` derived either from `base_schema` or `BaseSchema` \"\"\" #", "import union_field return union_field.Union( [ ( subtyp, field_for_schema( subtyp, metadata=metadata,", "return field.default def NewType( name: str, typ: Type[_U], field: Optional[Type[marshmallow.fields.Field]]", "Schema should be added :param base_schema: marshmallow schema used as", "def NewType( name: str, typ: Type[_U], field: Optional[Type[marshmallow.fields.Field]] = None,", "BaseSchema(marshmallow.Schema): ... def on_bind_field(self, field_name, field_obj): ... field_obj.data_key = (field_obj.data_key", "data: Mapping, *, many: bool = None, **kwargs): all_loaded =", "forward_reference = getattr(typ, \"__forward_arg__\", None) nested = ( nested_schema or", "detect if this # decorator is being called with parameters", ">>> @dataclasses.dataclass() ... class Building: ... height: Optional[Meters] ... name:", "None) # Nested dataclasses forward_reference = getattr(typ, \"__forward_arg__\", None) nested", "URL !\"}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'url':", "type checker ... >>> Point.Schema().load({'x':0, 'y':0}) # This line can", "It uses :func:`class_schema` internally. :param type cls: The dataclass to", "Removes duplicates. MAX_CLASS_SCHEMA_CACHE_SIZE = 1024 # _cls should never be", "will be copied to generated schema. MEMBERS_WHITELIST: Set[str] = {\"Meta\"}", "True) return marshmallow.fields.Raw(**metadata) # Generic types origin = typing_inspect.get_origin(typ) if", "= type_mapping.get(List, marshmallow.fields.List) return list_type(child_type, **metadata) if origin in (tuple,", "= None, **kwargs, ) -> Callable[[_U], _U]: \"\"\"NewType creates simple", "base_schema=base_schema, ) # enumerations if isinstance(typ, EnumMeta): import marshmallow_enum return", "marshmallow.fields.Raw(**metadata) # Generic types origin = typing_inspect.get_origin(typ) if origin: arguments", ">>> @dataclass ... 
class ContactInfo: ... mail: Email = dataclasses.field(default=\"<EMAIL>\")", "add_schema(_cls=None, base_schema=None): \"\"\" This decorator adds a marshmallow schema as", "last): ... marshmallow.exceptions.ValidationError: {'name': ['Name too long']} \"\"\" return _proxied_class_schema(clazz,", "field.default is dataclasses.MISSING: return marshmallow.missing return field.default def NewType( name:", "return clazz return decorator(_cls) if _cls else decorator def class_schema(", "schema :param clazz: A python class (may be a dataclass)", "class_schema(Anything)().load({\"name\": \"aaaaaargh\"}) Traceback (most recent call last): ... marshmallow.exceptions.ValidationError: {'name':", "by the user predefined_field = metadata.get(\"marshmallow_field\") if predefined_field: return predefined_field", "import inspect from enum import EnumMeta from functools import lru_cache", "a dataclass) :param base_schema: marshmallow schema used as a base", "Handle multiple validators from both `typ` and `metadata`. # See", ">>> field_for_schema(str, metadata={\"marshmallow_field\": marshmallow.fields.Url()}).__class__ <class 'marshmallow.fields.Url'> \"\"\" metadata = {}", "... @marshmallow.validates('name') ... def validates(self, value): ... if len(value) >", "if field: return field(**metadata) if typ is Any: metadata.setdefault(\"allow_none\", True)", "clazz: A python class (may be a dataclass) :param base_schema:", "# Recursive field ... >>> person = class_schema(Person)().load({ ... \"friends\":", "# Not a dataclass try: return class_schema(dataclasses.dataclass(clazz), base_schema) except Exception:", "marshmallow.fields.Url() # Custom marshmallow field ... }) ... >>> class_schema(Website)().load({\"url\":", "= ( nested_schema or forward_reference or class_schema(typ, base_schema=base_schema) ) return", "preserve field order ... class Point: ... x:float ... y:float", "so start it with an # underscore. 
The presence of", "except TypeError: # Not a dataclass try: return class_schema(dataclasses.dataclass(clazz), base_schema)", "to another dataclasses. A schema will be created for it", "... def add_schema(_cls=None, base_schema=None): \"\"\" This decorator adds a marshmallow", "type, base_schema: Optional[Type[marshmallow.Schema]] = None ) -> Type[marshmallow.Schema]: try: #", "class_schema(dataclasses.dataclass(clazz), base_schema) except Exception: raise TypeError( f\"{getattr(clazz, '__name__', repr(clazz))} is", "'Ramirez')) \"\"\" def decorator(clazz: Type[_U]) -> Type[_U]: clazz.Schema = class_schema(clazz,", "@dataclasses.dataclass ... class Website: ... url:str = dataclasses.field(metadata = {", "@dataclasses.dataclass() ... class Person: ... name: str = dataclasses.field(default=\"Anonymous\") ...", "# underscore. The presence of _cls is used to detect", "unknown=marshmallow.EXCLUDE) >>> c C(important=9, unimportant=0) >>> @dataclasses.dataclass ... class Website:", "\"I am not a good URL !\"}) Traceback (most recent", "It takes a python class, and generates a marshmallow schema", "not a good URL !\"}) Traceback (most recent call last):", "schema as the 'Schema' attribute in a dataclass. It uses", "ignore # Treat optional types as types with a None", ">>> Meters = typing.NewType('Meters', float) >>> @dataclasses.dataclass() ... class Building:", "will be imported ... \"unimportant\": 9 # This field will", "data, **_): ... raise marshmallow.ValidationError('never valid') ... >>> class_schema(NeverValid)().load({}) Traceback", "metadata.get(\"marshmallow_field\") if predefined_field: return predefined_field # Generic types specified without", "Return a new field for fields based on a super", "None, **kwargs): all_loaded = super().load(data, many=many, **kwargs) many = self.many", "False return field_for_schema(subtyp, metadata=metadata, base_schema=base_schema) elif typing_inspect.is_union_type(typ): from . 
import", "to be called as a function, so ignore type check", "given python type. The metadata of the dataclass field is", "turned into one.\" ) # Copy all marshmallow hooks and", "if _cls else decorator def class_schema( clazz: type, base_schema: Optional[Type[marshmallow.Schema]]", "Type[_U]: ... def add_schema(_cls=None, base_schema=None): \"\"\" This decorator adds a", "class object :param base_schema: marshmallow schema used as a base", "def load(self, data: Mapping, *, many: bool = None, **kwargs):", "type: ignore new_type._marshmallow_field = field # type: ignore new_type._marshmallow_args =", "birth: datetime.date = field(metadata= { \"required\": True # A parameter", "checked Point(x=0.0, y=0.0) \"\"\" # dataclass's typing doesn't expect it", "types field = _field_by_type(typ, base_schema) if field: return field(**metadata) if", "instead of dataclass fields attributes.update( ( field.name, field_for_schema( field.type, _get_field_default(field),", "nested_schema or forward_reference or class_schema(typ, base_schema=base_schema) ) return marshmallow.fields.Nested(nested, **metadata)", "except Exception: raise TypeError( f\"{getattr(clazz, '__name__', repr(clazz))} is not a", "A parameter to pass to marshmallow's field }) website:str =", "Any: metadata.setdefault(\"allow_none\", True) return marshmallow.fields.Raw(**metadata) # Generic types origin =", "method will be added: ... unimportant: int = dataclasses.field(init=False, default=0)", "subtyp, field_for_schema( subtyp, metadata=metadata, base_schema=base_schema ), ) for subtyp in", "uses :func:`class_schema` internally. :param type cls: The dataclass to which", "it as the ``marshmallow_field`` key in the metadata dictionary. 
>>>", "field_for_schema( newtype_supertype, metadata=metadata, default=default, base_schema=base_schema, ) def field_for_schema( typ: type,", "marshmallow.missing: metadata.setdefault(\"default\", default) # 'missing' must not be set for", "constructor :param base_schema: marshmallow schema used as a base class", "Base schema factory that creates a schema for `clazz` derived", "to a marshmallow schema :param clazz: A python class (may", "Point: ... x:float ... y:float ... Schema: ClassVar[Type[Schema]] = Schema", "should be generated :param default: value to use for (de)serialization" ]
[ "try: peers = await network.get_peers() peers = filter_protocol(peers, 's') results", "= [], [] for k, v in results.items(): (r1 if", "sys.exit(1) loop, stopping_fut, loop_thread = create_and_start_event_loop() network = Network() network.start()", "peers = await network.get_peers() peers = filter_protocol(peers, 's') results =", "if not isinstance(v, Exception) else r2).append(k) print(f\"Received {len(results)} answers\") try:", "len(r2)) except ZeroDivisionError: propagation = 0 print(f\"Propagation rate: {propagation:.1f} percent\")", "loop_thread = create_and_start_event_loop() network = Network() network.start() @log_exceptions async def", "r2 = [], [] for k, v in results.items(): (r1", "v in results.items(): (r1 if not isinstance(v, Exception) else r2).append(k)", "Network from electrum_trc.util import create_and_start_event_loop, log_exceptions try: txid = sys.argv[1]", "{len(results)} answers\") try: propagation = len(r1) * 100. / (len(r1)", "= await network.get_peers() peers = filter_protocol(peers, 's') results = await", "print(\"usage: txradar txid\") sys.exit(1) loop, stopping_fut, loop_thread = create_and_start_event_loop() network", "@log_exceptions async def f(): try: peers = await network.get_peers() peers", "import sys import asyncio from electrum_trc.network import filter_protocol, Network from", "import asyncio from electrum_trc.network import filter_protocol, Network from electrum_trc.util import", "sys import asyncio from electrum_trc.network import filter_protocol, Network from electrum_trc.util", "[txid]) r1, r2 = [], [] for k, v in", "propagation = len(r1) * 100. 
/ (len(r1) + len(r2)) except", "except: print(\"usage: txradar txid\") sys.exit(1) loop, stopping_fut, loop_thread = create_and_start_event_loop()", "electrum_trc.network import filter_protocol, Network from electrum_trc.util import create_and_start_event_loop, log_exceptions try:", "f(): try: peers = await network.get_peers() peers = filter_protocol(peers, 's')", "= len(r1) * 100. / (len(r1) + len(r2)) except ZeroDivisionError:", "= sys.argv[1] except: print(\"usage: txradar txid\") sys.exit(1) loop, stopping_fut, loop_thread", "from electrum_trc.network import filter_protocol, Network from electrum_trc.util import create_and_start_event_loop, log_exceptions", "= 0 print(f\"Propagation rate: {propagation:.1f} percent\") finally: stopping_fut.set_result(1) asyncio.run_coroutine_threadsafe(f(), loop)", "* 100. / (len(r1) + len(r2)) except ZeroDivisionError: propagation =", "r2).append(k) print(f\"Received {len(results)} answers\") try: propagation = len(r1) * 100.", "propagation = 0 print(f\"Propagation rate: {propagation:.1f} percent\") finally: stopping_fut.set_result(1) asyncio.run_coroutine_threadsafe(f(),", "ZeroDivisionError: propagation = 0 print(f\"Propagation rate: {propagation:.1f} percent\") finally: stopping_fut.set_result(1)", "except ZeroDivisionError: propagation = 0 print(f\"Propagation rate: {propagation:.1f} percent\") finally:", "stopping_fut, loop_thread = create_and_start_event_loop() network = Network() network.start() @log_exceptions async", "peers = filter_protocol(peers, 's') results = await network.send_multiple_requests(peers, 'blockchain.transaction.get', [txid])", "results.items(): (r1 if not isinstance(v, Exception) else r2).append(k) print(f\"Received {len(results)}", "+ len(r2)) except ZeroDivisionError: propagation = 0 print(f\"Propagation rate: {propagation:.1f}", "electrum_trc.util import create_and_start_event_loop, log_exceptions try: txid = sys.argv[1] except: print(\"usage:", "not isinstance(v, Exception) else r2).append(k) 
print(f\"Received {len(results)} answers\") try: propagation", "len(r1) * 100. / (len(r1) + len(r2)) except ZeroDivisionError: propagation", "= await network.send_multiple_requests(peers, 'blockchain.transaction.get', [txid]) r1, r2 = [], []", "loop, stopping_fut, loop_thread = create_and_start_event_loop() network = Network() network.start() @log_exceptions", "network.send_multiple_requests(peers, 'blockchain.transaction.get', [txid]) r1, r2 = [], [] for k,", "in results.items(): (r1 if not isinstance(v, Exception) else r2).append(k) print(f\"Received", "try: txid = sys.argv[1] except: print(\"usage: txradar txid\") sys.exit(1) loop,", "filter_protocol(peers, 's') results = await network.send_multiple_requests(peers, 'blockchain.transaction.get', [txid]) r1, r2", "answers\") try: propagation = len(r1) * 100. / (len(r1) +", "txid\") sys.exit(1) loop, stopping_fut, loop_thread = create_and_start_event_loop() network = Network()", "print(f\"Received {len(results)} answers\") try: propagation = len(r1) * 100. /", "(r1 if not isinstance(v, Exception) else r2).append(k) print(f\"Received {len(results)} answers\")", "'blockchain.transaction.get', [txid]) r1, r2 = [], [] for k, v", "network.start() @log_exceptions async def f(): try: peers = await network.get_peers()", "try: propagation = len(r1) * 100. 
/ (len(r1) + len(r2))", "log_exceptions try: txid = sys.argv[1] except: print(\"usage: txradar txid\") sys.exit(1)", "'s') results = await network.send_multiple_requests(peers, 'blockchain.transaction.get', [txid]) r1, r2 =", "#!/usr/bin/env python3 import sys import asyncio from electrum_trc.network import filter_protocol,", "results = await network.send_multiple_requests(peers, 'blockchain.transaction.get', [txid]) r1, r2 = [],", "await network.send_multiple_requests(peers, 'blockchain.transaction.get', [txid]) r1, r2 = [], [] for", "= Network() network.start() @log_exceptions async def f(): try: peers =", "import create_and_start_event_loop, log_exceptions try: txid = sys.argv[1] except: print(\"usage: txradar", "sys.argv[1] except: print(\"usage: txradar txid\") sys.exit(1) loop, stopping_fut, loop_thread =", "<reponame>TheSin-/electrum-trc #!/usr/bin/env python3 import sys import asyncio from electrum_trc.network import", "asyncio from electrum_trc.network import filter_protocol, Network from electrum_trc.util import create_and_start_event_loop,", "(len(r1) + len(r2)) except ZeroDivisionError: propagation = 0 print(f\"Propagation rate:", "create_and_start_event_loop, log_exceptions try: txid = sys.argv[1] except: print(\"usage: txradar txid\")", "Network() network.start() @log_exceptions async def f(): try: peers = await", "import filter_protocol, Network from electrum_trc.util import create_and_start_event_loop, log_exceptions try: txid", "[], [] for k, v in results.items(): (r1 if not", "for k, v in results.items(): (r1 if not isinstance(v, Exception)", "network.get_peers() peers = filter_protocol(peers, 's') results = await network.send_multiple_requests(peers, 'blockchain.transaction.get',", "create_and_start_event_loop() network = Network() network.start() @log_exceptions async def f(): try:", "/ (len(r1) + len(r2)) except ZeroDivisionError: propagation = 0 print(f\"Propagation", "k, v in results.items(): (r1 if not isinstance(v, Exception) else", 
"isinstance(v, Exception) else r2).append(k) print(f\"Received {len(results)} answers\") try: propagation =", "filter_protocol, Network from electrum_trc.util import create_and_start_event_loop, log_exceptions try: txid =", "python3 import sys import asyncio from electrum_trc.network import filter_protocol, Network", "= create_and_start_event_loop() network = Network() network.start() @log_exceptions async def f():", "txradar txid\") sys.exit(1) loop, stopping_fut, loop_thread = create_and_start_event_loop() network =", "async def f(): try: peers = await network.get_peers() peers =", "else r2).append(k) print(f\"Received {len(results)} answers\") try: propagation = len(r1) *", "await network.get_peers() peers = filter_protocol(peers, 's') results = await network.send_multiple_requests(peers,", "[] for k, v in results.items(): (r1 if not isinstance(v,", "from electrum_trc.util import create_and_start_event_loop, log_exceptions try: txid = sys.argv[1] except:", "100. / (len(r1) + len(r2)) except ZeroDivisionError: propagation = 0", "network = Network() network.start() @log_exceptions async def f(): try: peers", "def f(): try: peers = await network.get_peers() peers = filter_protocol(peers,", "r1, r2 = [], [] for k, v in results.items():", "txid = sys.argv[1] except: print(\"usage: txradar txid\") sys.exit(1) loop, stopping_fut,", "Exception) else r2).append(k) print(f\"Received {len(results)} answers\") try: propagation = len(r1)", "= filter_protocol(peers, 's') results = await network.send_multiple_requests(peers, 'blockchain.transaction.get', [txid]) r1," ]
[ ") if indeg[v]: continue q.append(v) print(dist.max()) def main() -> typing.NoReturn:", "1] indeg[v] -= 1 dist[v] = max( dist[v], dist[u] +", "n, dtype=np.int64, ) for u in q: for j in", "numpy as np def solve( n: int, g: np.array, )", "sys.argv[-1] == OJ: from numba import i8, njit from numba.pycc", "dist[v] = max( dist[v], dist[u] + 1, ) if indeg[v]:", "-= 1 dist[v] = max( dist[v], dist[u] + 1, )", "indeg[v] += 1 g = g[g[:, 0].argsort()] i = np.searchsorted(", "= (i8, i8[:, :]) cc.export( fn.__name__, signature, )(fn) cc.compile() exit(0)", ").reshape(m, 2) - 1 solve(n, g) OJ = 'ONLINE_JUDGE' if", "g) OJ = 'ONLINE_JUDGE' if sys.argv[-1] == OJ: from numba", "= np.zeros( n, dtype=np.int64, ) for u in q: for", "u in q: for j in range( i[u], i[u +", "= np.zeros( n, dtype=np.int64, ) for v in g[:, 1]:", "if not indeg[v] ] dist = np.zeros( n, dtype=np.int64, )", ":]) cc.export( fn.__name__, signature, )(fn) cc.compile() exit(0) from my_module import", "n: int, g: np.array, ) -> typing.NoReturn: indeg = np.zeros(", "g = g[g[:, 0].argsort()] i = np.searchsorted( g[:, 0], np.arange(n", "1]: indeg[v] += 1 g = g[g[:, 0].argsort()] i =", "q: for j in range( i[u], i[u + 1], ):", "import CC cc = CC('my_module') fn = solve signature =", "signature = (i8, i8[:, :]) cc.export( fn.__name__, signature, )(fn) cc.compile()", "main() -> typing.NoReturn: n, m = map( int, input().split(), )", "== OJ: from numba import i8, njit from numba.pycc import", "-> typing.NoReturn: n, m = map( int, input().split(), ) g", "solve( n: int, g: np.array, ) -> typing.NoReturn: indeg =", ") for v in g[:, 1]: indeg[v] += 1 g", "print(dist.max()) def main() -> typing.NoReturn: n, m = map( int,", ") g = np.array( sys.stdin.read().split(), dtype=np.int64, ).reshape(m, 2) - 1", "m = map( int, input().split(), ) g = np.array( sys.stdin.read().split(),", "g[:, 1]: indeg[v] += 1 g = g[g[:, 0].argsort()] i", "v in range(n) if not indeg[v] ] dist = np.zeros(", "np.searchsorted( g[:, 0], np.arange(n + 1) ) q = 
[", "[ v for v in range(n) if not indeg[v] ]", "= CC('my_module') fn = solve signature = (i8, i8[:, :])", "indeg[v] -= 1 dist[v] = max( dist[v], dist[u] + 1,", "in q: for j in range( i[u], i[u + 1],", "numba.pycc import CC cc = CC('my_module') fn = solve signature", "not indeg[v] ] dist = np.zeros( n, dtype=np.int64, ) for", "def main() -> typing.NoReturn: n, m = map( int, input().split(),", "1, ) if indeg[v]: continue q.append(v) print(dist.max()) def main() ->", "'ONLINE_JUDGE' if sys.argv[-1] == OJ: from numba import i8, njit", "int, g: np.array, ) -> typing.NoReturn: indeg = np.zeros( n,", "if indeg[v]: continue q.append(v) print(dist.max()) def main() -> typing.NoReturn: n,", "g = np.array( sys.stdin.read().split(), dtype=np.int64, ).reshape(m, 2) - 1 solve(n,", "OJ = 'ONLINE_JUDGE' if sys.argv[-1] == OJ: from numba import", "import numpy as np def solve( n: int, g: np.array,", "] dist = np.zeros( n, dtype=np.int64, ) for u in", "1 dist[v] = max( dist[v], dist[u] + 1, ) if", "dtype=np.int64, ).reshape(m, 2) - 1 solve(n, g) OJ = 'ONLINE_JUDGE'", "dist = np.zeros( n, dtype=np.int64, ) for u in q:", "continue q.append(v) print(dist.max()) def main() -> typing.NoReturn: n, m =", "dtype=np.int64, ) for v in g[:, 1]: indeg[v] += 1", "sys.stdin.read().split(), dtype=np.int64, ).reshape(m, 2) - 1 solve(n, g) OJ =", "v = g[j, 1] indeg[v] -= 1 dist[v] = max(", "1 g = g[g[:, 0].argsort()] i = np.searchsorted( g[:, 0],", "1) ) q = [ v for v in range(n)", "np.arange(n + 1) ) q = [ v for v", "typing import numpy as np def solve( n: int, g:", "for v in range(n) if not indeg[v] ] dist =", "indeg[v] ] dist = np.zeros( n, dtype=np.int64, ) for u", "np def solve( n: int, g: np.array, ) -> typing.NoReturn:", "= g[g[:, 0].argsort()] i = np.searchsorted( g[:, 0], np.arange(n +", "dtype=np.int64, ) for u in q: for j in range(", "typing.NoReturn: indeg = np.zeros( n, dtype=np.int64, ) for v in", ") -> typing.NoReturn: indeg = np.zeros( n, dtype=np.int64, ) for", "np.array( 
sys.stdin.read().split(), dtype=np.int64, ).reshape(m, 2) - 1 solve(n, g) OJ", "in range(n) if not indeg[v] ] dist = np.zeros( n,", "in range( i[u], i[u + 1], ): v = g[j,", "g[j, 1] indeg[v] -= 1 dist[v] = max( dist[v], dist[u]", "np.array, ) -> typing.NoReturn: indeg = np.zeros( n, dtype=np.int64, )", "OJ: from numba import i8, njit from numba.pycc import CC", "1], ): v = g[j, 1] indeg[v] -= 1 dist[v]", "njit from numba.pycc import CC cc = CC('my_module') fn =", "typing.NoReturn: n, m = map( int, input().split(), ) g =", "0], np.arange(n + 1) ) q = [ v for", "cc = CC('my_module') fn = solve signature = (i8, i8[:,", "range( i[u], i[u + 1], ): v = g[j, 1]", "v for v in range(n) if not indeg[v] ] dist", "as np def solve( n: int, g: np.array, ) ->", "cc.export( fn.__name__, signature, )(fn) cc.compile() exit(0) from my_module import solve", "np.zeros( n, dtype=np.int64, ) for v in g[:, 1]: indeg[v]", "int, input().split(), ) g = np.array( sys.stdin.read().split(), dtype=np.int64, ).reshape(m, 2)", "= max( dist[v], dist[u] + 1, ) if indeg[v]: continue", "= solve signature = (i8, i8[:, :]) cc.export( fn.__name__, signature,", "-> typing.NoReturn: indeg = np.zeros( n, dtype=np.int64, ) for v", ") for u in q: for j in range( i[u],", "from numba.pycc import CC cc = CC('my_module') fn = solve", "max( dist[v], dist[u] + 1, ) if indeg[v]: continue q.append(v)", "i8[:, :]) cc.export( fn.__name__, signature, )(fn) cc.compile() exit(0) from my_module", "CC cc = CC('my_module') fn = solve signature = (i8,", "input().split(), ) g = np.array( sys.stdin.read().split(), dtype=np.int64, ).reshape(m, 2) -", "import typing import numpy as np def solve( n: int,", "np.zeros( n, dtype=np.int64, ) for u in q: for j", "dist[u] + 1, ) if indeg[v]: continue q.append(v) print(dist.max()) def", "solve(n, g) OJ = 'ONLINE_JUDGE' if sys.argv[-1] == OJ: from", "g[:, 0], np.arange(n + 1) ) q = [ v", "map( int, input().split(), ) g = np.array( sys.stdin.read().split(), dtype=np.int64, ).reshape(m,", 
"numba import i8, njit from numba.pycc import CC cc =", "v in g[:, 1]: indeg[v] += 1 g = g[g[:,", "g[g[:, 0].argsort()] i = np.searchsorted( g[:, 0], np.arange(n + 1)", "import sys import typing import numpy as np def solve(", "for v in g[:, 1]: indeg[v] += 1 g =", "= np.array( sys.stdin.read().split(), dtype=np.int64, ).reshape(m, 2) - 1 solve(n, g)", "j in range( i[u], i[u + 1], ): v =", "= [ v for v in range(n) if not indeg[v]", "def solve( n: int, g: np.array, ) -> typing.NoReturn: indeg", "0].argsort()] i = np.searchsorted( g[:, 0], np.arange(n + 1) )", "+ 1], ): v = g[j, 1] indeg[v] -= 1", "+ 1) ) q = [ v for v in", "+= 1 g = g[g[:, 0].argsort()] i = np.searchsorted( g[:,", "n, dtype=np.int64, ) for v in g[:, 1]: indeg[v] +=", "indeg = np.zeros( n, dtype=np.int64, ) for v in g[:,", "1 solve(n, g) OJ = 'ONLINE_JUDGE' if sys.argv[-1] == OJ:", "i8, njit from numba.pycc import CC cc = CC('my_module') fn", "solve signature = (i8, i8[:, :]) cc.export( fn.__name__, signature, )(fn)", "= map( int, input().split(), ) g = np.array( sys.stdin.read().split(), dtype=np.int64,", "fn.__name__, signature, )(fn) cc.compile() exit(0) from my_module import solve main()", "q = [ v for v in range(n) if not", "in g[:, 1]: indeg[v] += 1 g = g[g[:, 0].argsort()]", "q.append(v) print(dist.max()) def main() -> typing.NoReturn: n, m = map(", "g: np.array, ) -> typing.NoReturn: indeg = np.zeros( n, dtype=np.int64,", "CC('my_module') fn = solve signature = (i8, i8[:, :]) cc.export(", "(i8, i8[:, :]) cc.export( fn.__name__, signature, )(fn) cc.compile() exit(0) from", ") q = [ v for v in range(n) if", "dist[v], dist[u] + 1, ) if indeg[v]: continue q.append(v) print(dist.max())", "indeg[v]: continue q.append(v) print(dist.max()) def main() -> typing.NoReturn: n, m", "import i8, njit from numba.pycc import CC cc = CC('my_module')", "range(n) if not indeg[v] ] dist = np.zeros( n, dtype=np.int64,", "sys import typing import numpy as np def solve( n:", "from numba import i8, njit from 
numba.pycc import CC cc", "i = np.searchsorted( g[:, 0], np.arange(n + 1) ) q", "= g[j, 1] indeg[v] -= 1 dist[v] = max( dist[v],", "fn = solve signature = (i8, i8[:, :]) cc.export( fn.__name__,", "for u in q: for j in range( i[u], i[u", "= 'ONLINE_JUDGE' if sys.argv[-1] == OJ: from numba import i8,", "if sys.argv[-1] == OJ: from numba import i8, njit from", "n, m = map( int, input().split(), ) g = np.array(", "2) - 1 solve(n, g) OJ = 'ONLINE_JUDGE' if sys.argv[-1]", "): v = g[j, 1] indeg[v] -= 1 dist[v] =", "i[u + 1], ): v = g[j, 1] indeg[v] -=", "for j in range( i[u], i[u + 1], ): v", "= np.searchsorted( g[:, 0], np.arange(n + 1) ) q =", "i[u], i[u + 1], ): v = g[j, 1] indeg[v]", "+ 1, ) if indeg[v]: continue q.append(v) print(dist.max()) def main()", "- 1 solve(n, g) OJ = 'ONLINE_JUDGE' if sys.argv[-1] ==" ]
[ "Bilder\") ap.add_argument(\"-fr\", \"--framerate\", default=100, type=int, help=\"Framerate in fps. Richtwerte: <Flow", "und Verarbeiten starten t_aufnahme = Thread(target=nimmAuf.starte, args=(dirname, numberOfImagesToGrab, framerate, startzeit))", "ap.add_argument(\"-n\", \"--number\", default=400, type=int, help=\"Anzahl an Frames die aufgenommen werden", "3 ml/s:50 fps, 3-6ml/s:100 fps, >6ml/s:200 fps; Default: 100 fps\")", "# Ordner erstellen print(f\"Ordnername: {dirname}\") beginn = time.time()-programmstart # Threads", "= argparse.ArgumentParser(description=\"\"\"Skript zum Aufnehmen von Bildern der Teststrecke und der", "in fps. Richtwerte: <Flow 3 ml/s:50 fps, 3-6ml/s:100 fps, >6ml/s:200", "# 2 Bytes empfangen except OSError: print(\"Kein Drucksensor angeschlossen\") exit()", "sollen. Default: 400 Bilder\") ap.add_argument(\"-fr\", \"--framerate\", default=100, type=int, help=\"Framerate in", "smbus2.SMBus(0) bus.read_i2c_block_data(0x40, 0, 2) # 2 Bytes empfangen except OSError:", "geöffnet.\") return False # Test ob Drucksensor angeschlossen ist try:", "Frames die aufgenommen werden sollen. Default: 400 Bilder\") ap.add_argument(\"-fr\", \"--framerate\",", "numberOfImagesToGrab = args['number'] framerate = args['framerate'] if __name__ == '__main__':", "time.time()-programmstart # Threads zum Aufnehmen und Verarbeiten starten t_aufnahme =", "Verarbeiten starten t_aufnahme = Thread(target=nimmAuf.starte, args=(dirname, numberOfImagesToGrab, framerate, startzeit)) t_tracke", "import time programmstart = time.time() # Argumente parsen (bei Aufruf", "from pypylon import pylon import nimmAuf import smbus2 import os", "if __name__ == '__main__': startzeit = time.time() #Test ob Kamera", "type=int, help=\"Anzahl an Frames die aufgenommen werden sollen. 
Default: 400", "args = vars(ap.parse_args()) # Argumente des Parsers extrahieren numberOfImagesToGrab =", "beginn = time.time()-programmstart # Threads zum Aufnehmen und Verarbeiten starten", "der Volumenbestimmung von Luftblasen\"\"\") ap.add_argument(\"-n\", \"--number\", default=400, type=int, help=\"Anzahl an", "400 Bilder\") ap.add_argument(\"-fr\", \"--framerate\", default=100, type=int, help=\"Framerate in fps. Richtwerte:", "ap.add_argument(\"-fr\", \"--framerate\", default=100, type=int, help=\"Framerate in fps. Richtwerte: <Flow 3", "== 0: print(\"Keine Kamera angeschlossen oder Kamera woanders geöffnet.\") return", "numberOfImagesToGrab, framerate, startzeit)) t_tracke = Thread(target=bestimmeVolumen.tracke, args=(dirname, numberOfImagesToGrab)) t_aufnahme.start() t_tracke.start()", "{dirname}\") beginn = time.time()-programmstart # Threads zum Aufnehmen und Verarbeiten", "\"--number\", default=400, type=int, help=\"Anzahl an Frames die aufgenommen werden sollen.", "type=int, help=\"Framerate in fps. Richtwerte: <Flow 3 ml/s:50 fps, 3-6ml/s:100", "3-6ml/s:100 fps, >6ml/s:200 fps; Default: 100 fps\") args = vars(ap.parse_args())", "der aktuellen Zeit und den Parametern einen individuellen Ordnernamen generieren", "fps\") args = vars(ap.parse_args()) # Argumente des Parsers extrahieren numberOfImagesToGrab", "# Argumente parsen (bei Aufruf im Terminal z.B. 
'starteMessung.py -n", "startzeit)) t_tracke = Thread(target=bestimmeVolumen.tracke, args=(dirname, numberOfImagesToGrab)) t_aufnahme.start() t_tracke.start() t_aufnahme.join() t_tracke.join()", "= args['framerate'] if __name__ == '__main__': startzeit = time.time() #Test", "= Thread(target=nimmAuf.starte, args=(dirname, numberOfImagesToGrab, framerate, startzeit)) t_tracke = Thread(target=bestimmeVolumen.tracke, args=(dirname,", "Argumente des Parsers extrahieren numberOfImagesToGrab = args['number'] framerate = args['framerate']", "bestimmeVolumen from threading import Thread import time programmstart = time.time()", "datetime import datetime from pypylon import pylon import nimmAuf import", "<filename>starteMessung.py from datetime import datetime from pypylon import pylon import", "Teststrecke und der Volumenbestimmung von Luftblasen\"\"\") ap.add_argument(\"-n\", \"--number\", default=400, type=int,", "die aufgenommen werden sollen. Default: 400 Bilder\") ap.add_argument(\"-fr\", \"--framerate\", default=100,", "von Luftblasen\"\"\") ap.add_argument(\"-n\", \"--number\", default=400, type=int, help=\"Anzahl an Frames die", "z.B. 
'starteMessung.py -n 100' eingeben) ap = argparse.ArgumentParser(description=\"\"\"Skript zum Aufnehmen", "Luftblasen\"\"\") ap.add_argument(\"-n\", \"--number\", default=400, type=int, help=\"Anzahl an Frames die aufgenommen", "= smbus2.SMBus(0) bus.read_i2c_block_data(0x40, 0, 2) # 2 Bytes empfangen except", "import pylon import nimmAuf import smbus2 import os import argparse", "und den Parametern einen individuellen Ordnernamen generieren dirname = f'{datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")}'", "os import argparse import bestimmeVolumen from threading import Thread import", "ob Drucksensor angeschlossen ist try: bus = smbus2.SMBus(0) bus.read_i2c_block_data(0x40, 0,", "import datetime from pypylon import pylon import nimmAuf import smbus2", "den Parametern einen individuellen Ordnernamen generieren dirname = f'{datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")}' os.mkdir(dirname)", "angeschlossen ist devices = pylon.TlFactory.GetInstance().EnumerateDevices() if len(devices) == 0: print(\"Keine", "args['framerate'] if __name__ == '__main__': startzeit = time.time() #Test ob", "Bildern der Teststrecke und der Volumenbestimmung von Luftblasen\"\"\") ap.add_argument(\"-n\", \"--number\",", "(bei Aufruf im Terminal z.B. 'starteMessung.py -n 100' eingeben) ap", "fps. 
Richtwerte: <Flow 3 ml/s:50 fps, 3-6ml/s:100 fps, >6ml/s:200 fps;", "Zeit und den Parametern einen individuellen Ordnernamen generieren dirname =", "Ordner erstellen print(f\"Ordnername: {dirname}\") beginn = time.time()-programmstart # Threads zum", "nimmAuf import smbus2 import os import argparse import bestimmeVolumen from", "import smbus2 import os import argparse import bestimmeVolumen from threading", "Kamera angeschlossen oder Kamera woanders geöffnet.\") return False # Test", "import Thread import time programmstart = time.time() # Argumente parsen", "len(devices) == 0: print(\"Keine Kamera angeschlossen oder Kamera woanders geöffnet.\")", "= pylon.TlFactory.GetInstance().EnumerateDevices() if len(devices) == 0: print(\"Keine Kamera angeschlossen oder", "= time.time() #Test ob Kamera angeschlossen ist devices = pylon.TlFactory.GetInstance().EnumerateDevices()", "'starteMessung.py -n 100' eingeben) ap = argparse.ArgumentParser(description=\"\"\"Skript zum Aufnehmen von", "-n 100' eingeben) ap = argparse.ArgumentParser(description=\"\"\"Skript zum Aufnehmen von Bildern", "framerate, startzeit)) t_tracke = Thread(target=bestimmeVolumen.tracke, args=(dirname, numberOfImagesToGrab)) t_aufnahme.start() t_tracke.start() t_aufnahme.join()", "bus = smbus2.SMBus(0) bus.read_i2c_block_data(0x40, 0, 2) # 2 Bytes empfangen", "from datetime import datetime from pypylon import pylon import nimmAuf", "fps, 3-6ml/s:100 fps, >6ml/s:200 fps; Default: 100 fps\") args =", "0: print(\"Keine Kamera angeschlossen oder Kamera woanders geöffnet.\") return False", "100' eingeben) ap = argparse.ArgumentParser(description=\"\"\"Skript zum Aufnehmen von Bildern der", "an Frames die aufgenommen werden sollen. 
Default: 400 Bilder\") ap.add_argument(\"-fr\",", "extrahieren numberOfImagesToGrab = args['number'] framerate = args['framerate'] if __name__ ==", "print(\"Kein Drucksensor angeschlossen\") exit() # Aus der aktuellen Zeit und", "ist try: bus = smbus2.SMBus(0) bus.read_i2c_block_data(0x40, 0, 2) # 2", "threading import Thread import time programmstart = time.time() # Argumente", "Argumente parsen (bei Aufruf im Terminal z.B. 'starteMessung.py -n 100'", "zum Aufnehmen und Verarbeiten starten t_aufnahme = Thread(target=nimmAuf.starte, args=(dirname, numberOfImagesToGrab,", "Terminal z.B. 'starteMessung.py -n 100' eingeben) ap = argparse.ArgumentParser(description=\"\"\"Skript zum", "Drucksensor angeschlossen\") exit() # Aus der aktuellen Zeit und den", "individuellen Ordnernamen generieren dirname = f'{datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")}' os.mkdir(dirname) # Ordner erstellen", "Thread import time programmstart = time.time() # Argumente parsen (bei", "argparse import bestimmeVolumen from threading import Thread import time programmstart", "programmstart = time.time() # Argumente parsen (bei Aufruf im Terminal", "False # Test ob Drucksensor angeschlossen ist try: bus =", "Aufruf im Terminal z.B. 'starteMessung.py -n 100' eingeben) ap =", "Kamera woanders geöffnet.\") return False # Test ob Drucksensor angeschlossen", "fps; Default: 100 fps\") args = vars(ap.parse_args()) # Argumente des", "werden sollen. 
Default: 400 Bilder\") ap.add_argument(\"-fr\", \"--framerate\", default=100, type=int, help=\"Framerate", "vars(ap.parse_args()) # Argumente des Parsers extrahieren numberOfImagesToGrab = args['number'] framerate", "Default: 100 fps\") args = vars(ap.parse_args()) # Argumente des Parsers", "time.time() #Test ob Kamera angeschlossen ist devices = pylon.TlFactory.GetInstance().EnumerateDevices() if", "der Teststrecke und der Volumenbestimmung von Luftblasen\"\"\") ap.add_argument(\"-n\", \"--number\", default=400,", "pylon.TlFactory.GetInstance().EnumerateDevices() if len(devices) == 0: print(\"Keine Kamera angeschlossen oder Kamera", "ist devices = pylon.TlFactory.GetInstance().EnumerateDevices() if len(devices) == 0: print(\"Keine Kamera", "generieren dirname = f'{datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")}' os.mkdir(dirname) # Ordner erstellen print(f\"Ordnername: {dirname}\")", "import os import argparse import bestimmeVolumen from threading import Thread", "# Test ob Drucksensor angeschlossen ist try: bus = smbus2.SMBus(0)", "time programmstart = time.time() # Argumente parsen (bei Aufruf im", "default=100, type=int, help=\"Framerate in fps. 
Richtwerte: <Flow 3 ml/s:50 fps,", "pylon import nimmAuf import smbus2 import os import argparse import", "100 fps\") args = vars(ap.parse_args()) # Argumente des Parsers extrahieren", "Bytes empfangen except OSError: print(\"Kein Drucksensor angeschlossen\") exit() # Aus", "t_aufnahme = Thread(target=nimmAuf.starte, args=(dirname, numberOfImagesToGrab, framerate, startzeit)) t_tracke = Thread(target=bestimmeVolumen.tracke,", "angeschlossen\") exit() # Aus der aktuellen Zeit und den Parametern", "erstellen print(f\"Ordnername: {dirname}\") beginn = time.time()-programmstart # Threads zum Aufnehmen", "zum Aufnehmen von Bildern der Teststrecke und der Volumenbestimmung von", "argparse.ArgumentParser(description=\"\"\"Skript zum Aufnehmen von Bildern der Teststrecke und der Volumenbestimmung", "Richtwerte: <Flow 3 ml/s:50 fps, 3-6ml/s:100 fps, >6ml/s:200 fps; Default:", "framerate = args['framerate'] if __name__ == '__main__': startzeit = time.time()", "einen individuellen Ordnernamen generieren dirname = f'{datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")}' os.mkdir(dirname) # Ordner", "= vars(ap.parse_args()) # Argumente des Parsers extrahieren numberOfImagesToGrab = args['number']", "Thread(target=nimmAuf.starte, args=(dirname, numberOfImagesToGrab, framerate, startzeit)) t_tracke = Thread(target=bestimmeVolumen.tracke, args=(dirname, numberOfImagesToGrab))", "Drucksensor angeschlossen ist try: bus = smbus2.SMBus(0) bus.read_i2c_block_data(0x40, 0, 2)", "Aufnehmen von Bildern der Teststrecke und der Volumenbestimmung von Luftblasen\"\"\")", "angeschlossen ist try: bus = smbus2.SMBus(0) bus.read_i2c_block_data(0x40, 0, 2) #", "pypylon import pylon import nimmAuf import smbus2 import os import", "aktuellen Zeit und den Parametern einen individuellen Ordnernamen generieren dirname", "dirname = f'{datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")}' os.mkdir(dirname) # Ordner erstellen print(f\"Ordnername: {dirname}\") beginn", "Test ob Drucksensor angeschlossen ist try: bus 
= smbus2.SMBus(0) bus.read_i2c_block_data(0x40,", "Default: 400 Bilder\") ap.add_argument(\"-fr\", \"--framerate\", default=100, type=int, help=\"Framerate in fps.", "Kamera angeschlossen ist devices = pylon.TlFactory.GetInstance().EnumerateDevices() if len(devices) == 0:", "Ordnernamen generieren dirname = f'{datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")}' os.mkdir(dirname) # Ordner erstellen print(f\"Ordnername:", "# Threads zum Aufnehmen und Verarbeiten starten t_aufnahme = Thread(target=nimmAuf.starte,", "args['number'] framerate = args['framerate'] if __name__ == '__main__': startzeit =", "print(f\"Ordnername: {dirname}\") beginn = time.time()-programmstart # Threads zum Aufnehmen und", "args=(dirname, numberOfImagesToGrab, framerate, startzeit)) t_tracke = Thread(target=bestimmeVolumen.tracke, args=(dirname, numberOfImagesToGrab)) t_aufnahme.start()", "ob Kamera angeschlossen ist devices = pylon.TlFactory.GetInstance().EnumerateDevices() if len(devices) ==", "<Flow 3 ml/s:50 fps, 3-6ml/s:100 fps, >6ml/s:200 fps; Default: 100", "datetime from pypylon import pylon import nimmAuf import smbus2 import", "\"--framerate\", default=100, type=int, help=\"Framerate in fps. 
Richtwerte: <Flow 3 ml/s:50", ">6ml/s:200 fps; Default: 100 fps\") args = vars(ap.parse_args()) # Argumente", "import bestimmeVolumen from threading import Thread import time programmstart =", "if len(devices) == 0: print(\"Keine Kamera angeschlossen oder Kamera woanders", "from threading import Thread import time programmstart = time.time() #", "#Test ob Kamera angeschlossen ist devices = pylon.TlFactory.GetInstance().EnumerateDevices() if len(devices)", "except OSError: print(\"Kein Drucksensor angeschlossen\") exit() # Aus der aktuellen", "= f'{datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")}' os.mkdir(dirname) # Ordner erstellen print(f\"Ordnername: {dirname}\") beginn =", "return False # Test ob Drucksensor angeschlossen ist try: bus", "= time.time() # Argumente parsen (bei Aufruf im Terminal z.B.", "devices = pylon.TlFactory.GetInstance().EnumerateDevices() if len(devices) == 0: print(\"Keine Kamera angeschlossen", "eingeben) ap = argparse.ArgumentParser(description=\"\"\"Skript zum Aufnehmen von Bildern der Teststrecke", "Threads zum Aufnehmen und Verarbeiten starten t_aufnahme = Thread(target=nimmAuf.starte, args=(dirname,", "f'{datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")}' os.mkdir(dirname) # Ordner erstellen print(f\"Ordnername: {dirname}\") beginn = time.time()-programmstart", "import argparse import bestimmeVolumen from threading import Thread import time", "help=\"Anzahl an Frames die aufgenommen werden sollen. 
Default: 400 Bilder\")", "# Argumente des Parsers extrahieren numberOfImagesToGrab = args['number'] framerate =", "# Aus der aktuellen Zeit und den Parametern einen individuellen", "os.mkdir(dirname) # Ordner erstellen print(f\"Ordnername: {dirname}\") beginn = time.time()-programmstart #", "oder Kamera woanders geöffnet.\") return False # Test ob Drucksensor", "startzeit = time.time() #Test ob Kamera angeschlossen ist devices =", "'__main__': startzeit = time.time() #Test ob Kamera angeschlossen ist devices", "= time.time()-programmstart # Threads zum Aufnehmen und Verarbeiten starten t_aufnahme", "und der Volumenbestimmung von Luftblasen\"\"\") ap.add_argument(\"-n\", \"--number\", default=400, type=int, help=\"Anzahl", "empfangen except OSError: print(\"Kein Drucksensor angeschlossen\") exit() # Aus der", "help=\"Framerate in fps. Richtwerte: <Flow 3 ml/s:50 fps, 3-6ml/s:100 fps,", "Aufnehmen und Verarbeiten starten t_aufnahme = Thread(target=nimmAuf.starte, args=(dirname, numberOfImagesToGrab, framerate,", "Volumenbestimmung von Luftblasen\"\"\") ap.add_argument(\"-n\", \"--number\", default=400, type=int, help=\"Anzahl an Frames", "starten t_aufnahme = Thread(target=nimmAuf.starte, args=(dirname, numberOfImagesToGrab, framerate, startzeit)) t_tracke =", "aufgenommen werden sollen. Default: 400 Bilder\") ap.add_argument(\"-fr\", \"--framerate\", default=100, type=int,", "fps, >6ml/s:200 fps; Default: 100 fps\") args = vars(ap.parse_args()) #", "0, 2) # 2 Bytes empfangen except OSError: print(\"Kein Drucksensor", "bus.read_i2c_block_data(0x40, 0, 2) # 2 Bytes empfangen except OSError: print(\"Kein", "2 Bytes empfangen except OSError: print(\"Kein Drucksensor angeschlossen\") exit() #", "default=400, type=int, help=\"Anzahl an Frames die aufgenommen werden sollen. 
Default:", "try: bus = smbus2.SMBus(0) bus.read_i2c_block_data(0x40, 0, 2) # 2 Bytes", "von Bildern der Teststrecke und der Volumenbestimmung von Luftblasen\"\"\") ap.add_argument(\"-n\",", "Aus der aktuellen Zeit und den Parametern einen individuellen Ordnernamen", "im Terminal z.B. 'starteMessung.py -n 100' eingeben) ap = argparse.ArgumentParser(description=\"\"\"Skript", "__name__ == '__main__': startzeit = time.time() #Test ob Kamera angeschlossen", "import nimmAuf import smbus2 import os import argparse import bestimmeVolumen", "Parametern einen individuellen Ordnernamen generieren dirname = f'{datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")}' os.mkdir(dirname) #", "Parsers extrahieren numberOfImagesToGrab = args['number'] framerate = args['framerate'] if __name__", "ml/s:50 fps, 3-6ml/s:100 fps, >6ml/s:200 fps; Default: 100 fps\") args", "time.time() # Argumente parsen (bei Aufruf im Terminal z.B. 'starteMessung.py", "2) # 2 Bytes empfangen except OSError: print(\"Kein Drucksensor angeschlossen\")", "= args['number'] framerate = args['framerate'] if __name__ == '__main__': startzeit", "angeschlossen oder Kamera woanders geöffnet.\") return False # Test ob", "ap = argparse.ArgumentParser(description=\"\"\"Skript zum Aufnehmen von Bildern der Teststrecke und", "== '__main__': startzeit = time.time() #Test ob Kamera angeschlossen ist", "OSError: print(\"Kein Drucksensor angeschlossen\") exit() # Aus der aktuellen Zeit", "woanders geöffnet.\") return False # Test ob Drucksensor angeschlossen ist", "des Parsers extrahieren numberOfImagesToGrab = args['number'] framerate = args['framerate'] if", "smbus2 import os import argparse import bestimmeVolumen from threading import", "exit() # Aus der aktuellen Zeit und den Parametern einen", "print(\"Keine Kamera angeschlossen oder Kamera woanders geöffnet.\") return False #", "parsen (bei Aufruf im Terminal z.B. 'starteMessung.py -n 100' eingeben)" ]
[ "return ((self.getLUy(), self.getLUx()), # почему -? я не знаю -self.getHeight(),", "- minRDy) * (minRDx - minLUx) # не уверен что", "temp = tempfile.NamedTemporaryFile() path = os.path.join(os.getcwd(), temp.name) plt.savefig(path) return os.path.split(temp.name", "__init__(self, coordinates: list): if len(coordinates) != 4: raise ValueError(\"Нужно подавать", "self.RUy - self.LDy def getLUx(self): return self.LDx def getLUy(self): return", "x2 <= maxX) return a or b or c or", "PIL import Image import numpy as np import matplotlib.patches as", "= bigLUy if bigRDy < minRDy: minRDy = bigRDy if", "and b and c and d # если тру, то", "абсолютно в другом порядке, чем должно быть? что ха дринся", "(minX <= x2 <= maxX) return a and b and", "d = (minX <= x2 <= maxX) return a and", "(minX <= x2 <= maxX) return a or b or", "[Rectangle(i) for i in objectsListRect] rect = patches.Rectangle(*bigRect.getMTparam(), linewidth=1, edgecolor='g',", "* (minRDx - minLUx) return inObjSquare / fullSquare >= innerPercent", "minLUx = bigLUx if bigRDx > minRDx: minRDx = bigRDx", "= (minLUy - minRDy) * (minRDx - minLUx) # не", "getLUx(self): return self.LDx def getLUy(self): return self.RUy def getMTparam(self): return", "raise ValueError(\"Нужно подавать координаты(х,у) двух противоложных вершин\") if coordinates[0] >=", "maxX) return a and b and c and d #", "minRDx: minRDx = bigRDx inObjSquare = (minLUy - minRDy) *", "objectsListRect] rect = patches.Rectangle(*bigRect.getMTparam(), linewidth=1, edgecolor='g', facecolor='None') ax.add_patch(rect) for i", "bigRDy, bigRDx = bigRect minLUy, minLUx, minRDy, minRDx = minRect", "if coordinates[0] >= coordinates[2] or coordinates[1] >= coordinates[3]: raise ValueError(", "объект полностью внутри большого прямоугольника def isPartiallyInside(bigRect, minRect, innerPercent=0.5): #", "в другом порядке, чем должно быть? 
что ха дринся def", "x1, y2, x2 = bigRect minX = x1 minY =", "d def isCompletelyInside(bigRect, minRect): # объект полностью внутри прямоугольника y1,", "все абсолютно в другом порядке, чем должно быть? что ха", "def getLUx(self): return self.LDx def getLUy(self): return self.RUy def getMTparam(self):", "ValueError(\"Нужно подавать координаты(х,у) двух противоложных вершин\") if coordinates[0] >= coordinates[2]", "правильно # Не уверен в ифах if bigLUy < minLUy:", "a or b or c or d def isCompletelyInside(bigRect, minRect):", "self.LDx def getLUy(self): return self.RUy def getMTparam(self): return ((self.getLUy(), self.getLUx()),", "/ fullSquare >= innerPercent def createGraphic(imagePath: str, searchRect: list, objectsListRect:", "x2 <= maxX) return a and b and c and", "inObjSquare / fullSquare >= innerPercent def createGraphic(imagePath: str, searchRect: list,", "raise ValueError( \"Неверно заданы вершины, сначала подаются 2 координаты нижнего", "прямоугольника y1, x1, y2, x2 = bigRect minX = x1", "minRDx = bigRDx inObjSquare = (minLUy - minRDy) * (minRDx", "= 0 LDy = 0 RUx = 0 RUy =", ">= coordinates[2] or coordinates[1] >= coordinates[3]: raise ValueError( \"Неверно заданы", "innerPercent=0.5): # объект частично внутри прямоугольника bigLUy, bigLUx, bigRDy, bigRDx", "or d def isCompletelyInside(bigRect, minRect): # объект полностью внутри прямоугольника", "minRDy) * (minRDx - minLUx) # не уверен что правильно", "- minRDy) * (minRDx - minLUx) return inObjSquare / fullSquare", "y2 <= maxY) d = (minX <= x2 <= maxX)", "bigLUy < minLUy: minLUy = bigLUy if bigRDy < minRDy:", "self.RUx - self.LDx def getHeight(self): return self.RUy - self.LDy def", "def getHeight(self): return self.RUy - self.LDy def getLUx(self): return self.LDx", "# не уверен что правильно # Не уверен в ифах", "ифах if bigLUy < minLUy: minLUy = bigLUy if bigRDy", "0 RUy = 0 def __init__(self, coordinates: list): if len(coordinates)", "bigLUx > minLUx: minLUx = bigLUx if bigRDx > minRDx:", 
"linewidth=1, edgecolor='r', facecolor='none') ax.add_patch(rect) temp = tempfile.NamedTemporaryFile() path = os.path.join(os.getcwd(),", "b and c and d # если тру, то объект", "getHeight(self): return self.RUy - self.LDy def getLUx(self): return self.LDx def", "minRect): # хотя бы одна точка лежит внутри minY, minX,", "def getMTparam(self): return ((self.getLUy(), self.getLUx()), # почему -? я не", "> minLUx: minLUx = bigLUx if bigRDx > minRDx: minRDx", "x1 minY = y1 # вроде верно maxX = x2", "то объект полностью внутри большого прямоугольника def isPartiallyInside(bigRect, minRect, innerPercent=0.5):", "= bigRDy if bigLUx > minLUx: minLUx = bigLUx if", "> minRDx: minRDx = bigRDx inObjSquare = (minLUy - minRDy)", "<= maxY) b = (minX <= x1 <= maxX) c", "minRect fullSquare = (minLUy - minRDy) * (minRDx - minLUx)", "<= y1 <= maxY) b = (minX <= x1 <=", "tempfile def hasOnePointInside(bigRect, minRect): # хотя бы одна точка лежит", "len(coordinates) != 4: raise ValueError(\"Нужно подавать координаты(х,у) двух противоложных вершин\")", "угла, потом верхнего правого\") self.LDx, self.LDy, self.RUx, self.RUy = coordinates", "что ха дринся def getCenterOfDown(self): return [(self.LDx + self.RUx) /", "- minLUx) # не уверен что правильно # Не уверен", "RUx = 0 RUy = 0 def __init__(self, coordinates: list):", "bigLUy if bigRDy < minRDy: minRDy = bigRDy if bigLUx", "getLUy(self): return self.RUy def getMTparam(self): return ((self.getLUy(), self.getLUx()), # почему", "= y2 y1, x1, y2, x2 = minRect a =", "= tempfile.NamedTemporaryFile() path = os.path.join(os.getcwd(), temp.name) plt.savefig(path) return os.path.split(temp.name +", "= [Rectangle(i) for i in objectsListRect] rect = patches.Rectangle(*bigRect.getMTparam(), linewidth=1,", "os import tempfile def hasOnePointInside(bigRect, minRect): # хотя бы одна", "if bigRDy < minRDy: minRDy = bigRDy if bigLUx >", "= 0 RUx = 0 RUy = 0 def __init__(self,", "= (minY <= y1 <= maxY) b = (minX <=", "# почему -? 
я не знаю -self.getHeight(), self.getWidth()) # все", "rect = patches.Rectangle(*i.getMTparam(), linewidth=1, edgecolor='r', facecolor='none') ax.add_patch(rect) temp = tempfile.NamedTemporaryFile()", "return a or b or c or d def isCompletelyInside(bigRect,", "двух противоложных вершин\") if coordinates[0] >= coordinates[2] or coordinates[1] >=", "bigRDx inObjSquare = (minLUy - minRDy) * (minRDx - minLUx)", "левого угла, потом верхнего правого\") self.LDx, self.LDy, self.RUx, self.RUy =", "os.path.split(temp.name + \".png\") class Rectangle: LDx = 0 LDy =", "ax.add_patch(rect) for i in minRects: rect = patches.Rectangle(*i.getMTparam(), linewidth=1, edgecolor='r',", "coordinates[1] >= coordinates[3]: raise ValueError( \"Неверно заданы вершины, сначала подаются", "bigLUy, bigLUx, bigRDy, bigRDx = bigRect minLUy, minLUx, minRDy, minRDx", "= bigRDx inObjSquare = (minLUy - minRDy) * (minRDx -", "<= x2 <= maxX) return a or b or c", "# объект частично внутри прямоугольника bigLUy, bigLUx, bigRDy, bigRDx =", "координаты(х,у) двух противоложных вершин\") if coordinates[0] >= coordinates[2] or coordinates[1]", "maxX) return a or b or c or d def", "x2 = minRect a = (minY <= y1 <= maxY)", "i in objectsListRect] rect = patches.Rectangle(*bigRect.getMTparam(), linewidth=1, edgecolor='g', facecolor='None') ax.add_patch(rect)", "tempfile.NamedTemporaryFile() path = os.path.join(os.getcwd(), temp.name) plt.savefig(path) return os.path.split(temp.name + \".png\")", ">= innerPercent def createGraphic(imagePath: str, searchRect: list, objectsListRect: list): import", "y2, x2 = bigRect minX = x1 minY = y1", "= coordinates def getWidth(self): return self.RUx - self.LDx def getHeight(self):", "y1, x1, y2, x2 = minRect a = (minY <=", "minLUy, minLUx, minRDy, minRDx = minRect fullSquare = (minLUy -", "minRect a = (minY <= y1 <= maxY) b =", "maxX = bigRect y1, x1, y2, x2 = minRect a", "minRects = [Rectangle(i) for i in objectsListRect] rect = patches.Rectangle(*bigRect.getMTparam(),", 
"bigRDx = bigRect minLUy, minLUx, minRDy, minRDx = minRect fullSquare", "порядке, чем должно быть? что ха дринся def getCenterOfDown(self): return", "Rectangle(searchRect) minRects = [Rectangle(i) for i in objectsListRect] rect =", "i in minRects: rect = patches.Rectangle(*i.getMTparam(), linewidth=1, edgecolor='r', facecolor='none') ax.add_patch(rect)", "(minRDx - minLUx) # не уверен что правильно # Не", "-? я не знаю -self.getHeight(), self.getWidth()) # все абсолютно в", "objectsListRect: list): import matplotlib.pyplot as plt from PIL import Image", "self.getWidth()) # все абсолютно в другом порядке, чем должно быть?", "должно быть? что ха дринся def getCenterOfDown(self): return [(self.LDx +", "import os import tempfile def hasOnePointInside(bigRect, minRect): # хотя бы", "minLUx) return inObjSquare / fullSquare >= innerPercent def createGraphic(imagePath: str,", "as plt from PIL import Image import numpy as np", "уверен что правильно # Не уверен в ифах if bigLUy", "= (minX <= x1 <= maxX) c = (minY <=", "= patches.Rectangle(*i.getMTparam(), linewidth=1, edgecolor='r', facecolor='none') ax.add_patch(rect) temp = tempfile.NamedTemporaryFile() path", "<= x1 <= maxX) c = (minY <= y2 <=", "= os.path.join(os.getcwd(), temp.name) plt.savefig(path) return os.path.split(temp.name + \".png\") class Rectangle:", "= np.array(Image.open(imagePath), dtype=np.uint8) fig, ax = plt.subplots(1) ax.imshow(im) bigRect =", "maxX) c = (minY <= y2 <= maxY) d =", "minX = x1 minY = y1 # вроде верно maxX", "bigLUx if bigRDx > minRDx: minRDx = bigRDx inObjSquare =", "# вроде верно maxX = x2 maxY = y2 y1,", "self.RUx, self.RUy = coordinates def getWidth(self): return self.RUx - self.LDx", "(minX <= x1 <= maxX) c = (minY <= y2", "точка лежит внутри minY, minX, maxY, maxX = bigRect y1,", "list): import matplotlib.pyplot as plt from PIL import Image import", "частично внутри прямоугольника bigLUy, bigLUx, bigRDy, bigRDx = bigRect minLUy,", "полностью внутри прямоугольника y1, x1, y2, x2 = 
bigRect minX", "for i in objectsListRect] rect = patches.Rectangle(*bigRect.getMTparam(), linewidth=1, edgecolor='g', facecolor='None')", "minLUx: minLUx = bigLUx if bigRDx > minRDx: minRDx =", "объект частично внутри прямоугольника bigLUy, bigLUx, bigRDy, bigRDx = bigRect", "ValueError( \"Неверно заданы вершины, сначала подаются 2 координаты нижнего левого", "(minLUy - minRDy) * (minRDx - minLUx) return inObjSquare /", "<= x2 <= maxX) return a and b and c", "Image import numpy as np import matplotlib.patches as patches im", "a = (minY <= y1 <= maxY) b = (minX", "дринся def getCenterOfDown(self): return [(self.LDx + self.RUx) / 2, self.LDy]", "подавать координаты(х,у) двух противоложных вершин\") if coordinates[0] >= coordinates[2] or", "внутри прямоугольника bigLUy, bigLUx, bigRDy, bigRDx = bigRect minLUy, minLUx,", "- minLUx) return inObjSquare / fullSquare >= innerPercent def createGraphic(imagePath:", "= 0 RUy = 0 def __init__(self, coordinates: list): if", "bigLUx, bigRDy, bigRDx = bigRect minLUy, minLUx, minRDy, minRDx =", "< minLUy: minLUy = bigLUy if bigRDy < minRDy: minRDy", "coordinates def getWidth(self): return self.RUx - self.LDx def getHeight(self): return", "minRDy) * (minRDx - minLUx) return inObjSquare / fullSquare >=", "- self.LDx def getHeight(self): return self.RUy - self.LDy def getLUx(self):", "str, searchRect: list, objectsListRect: list): import matplotlib.pyplot as plt from", "bigRect minLUy, minLUx, minRDy, minRDx = minRect fullSquare = (minLUy", "<= maxX) return a or b or c or d", "# все абсолютно в другом порядке, чем должно быть? 
что", "return inObjSquare / fullSquare >= innerPercent def createGraphic(imagePath: str, searchRect:", "plt.savefig(path) return os.path.split(temp.name + \".png\") class Rectangle: LDx = 0", "0 def __init__(self, coordinates: list): if len(coordinates) != 4: raise", "if bigRDx > minRDx: minRDx = bigRDx inObjSquare = (minLUy", "import numpy as np import matplotlib.patches as patches im =", "я не знаю -self.getHeight(), self.getWidth()) # все абсолютно в другом", "= y1 # вроде верно maxX = x2 maxY =", "minY = y1 # вроде верно maxX = x2 maxY", "ax = plt.subplots(1) ax.imshow(im) bigRect = Rectangle(searchRect) minRects = [Rectangle(i)", "from PIL import Image import numpy as np import matplotlib.patches", "= Rectangle(searchRect) minRects = [Rectangle(i) for i in objectsListRect] rect", "hasOnePointInside(bigRect, minRect): # хотя бы одна точка лежит внутри minY,", "одна точка лежит внутри minY, minX, maxY, maxX = bigRect", "or coordinates[1] >= coordinates[3]: raise ValueError( \"Неверно заданы вершины, сначала", "dtype=np.uint8) fig, ax = plt.subplots(1) ax.imshow(im) bigRect = Rectangle(searchRect) minRects", "self.RUy = coordinates def getWidth(self): return self.RUx - self.LDx def", "minRect, innerPercent=0.5): # объект частично внутри прямоугольника bigLUy, bigLUx, bigRDy,", "minLUy = bigLUy if bigRDy < minRDy: minRDy = bigRDy", "return self.RUy def getMTparam(self): return ((self.getLUy(), self.getLUx()), # почему -?", "or b or c or d def isCompletelyInside(bigRect, minRect): #", "внутри minY, minX, maxY, maxX = bigRect y1, x1, y2,", "patches im = np.array(Image.open(imagePath), dtype=np.uint8) fig, ax = plt.subplots(1) ax.imshow(im)", "полностью внутри большого прямоугольника def isPartiallyInside(bigRect, minRect, innerPercent=0.5): # объект", "temp.name) plt.savefig(path) return os.path.split(temp.name + \".png\") class Rectangle: LDx =", "= bigRect y1, x1, y2, x2 = minRect a =", "and d # если тру, то объект полностью внутри большого", "= bigRect minLUy, 
minLUx, minRDy, minRDx = minRect fullSquare =", "что правильно # Не уверен в ифах if bigLUy <", "as np import matplotlib.patches as patches im = np.array(Image.open(imagePath), dtype=np.uint8)", "plt.subplots(1) ax.imshow(im) bigRect = Rectangle(searchRect) minRects = [Rectangle(i) for i", "знаю -self.getHeight(), self.getWidth()) # все абсолютно в другом порядке, чем", "x2 = bigRect minX = x1 minY = y1 #", "c or d def isCompletelyInside(bigRect, minRect): # объект полностью внутри", "верхнего правого\") self.LDx, self.LDy, self.RUx, self.RUy = coordinates def getWidth(self):", "numpy as np import matplotlib.patches as patches im = np.array(Image.open(imagePath),", "нижнего левого угла, потом верхнего правого\") self.LDx, self.LDy, self.RUx, self.RUy", "bigRect = Rectangle(searchRect) minRects = [Rectangle(i) for i in objectsListRect]", "coordinates[0] >= coordinates[2] or coordinates[1] >= coordinates[3]: raise ValueError( \"Неверно", "LDy = 0 RUx = 0 RUy = 0 def", "np.array(Image.open(imagePath), dtype=np.uint8) fig, ax = plt.subplots(1) ax.imshow(im) bigRect = Rectangle(searchRect)", "(minLUy - minRDy) * (minRDx - minLUx) # не уверен", "= x1 minY = y1 # вроде верно maxX =", "((self.getLUy(), self.getLUx()), # почему -? 
я не знаю -self.getHeight(), self.getWidth())", "(minY <= y2 <= maxY) d = (minX <= x2", "def hasOnePointInside(bigRect, minRect): # хотя бы одна точка лежит внутри", "<= maxY) d = (minX <= x2 <= maxX) return", "minY, minX, maxY, maxX = bigRect y1, x1, y2, x2", "тру, то объект полностью внутри большого прямоугольника def isPartiallyInside(bigRect, minRect,", "если тру, то объект полностью внутри большого прямоугольника def isPartiallyInside(bigRect,", "isPartiallyInside(bigRect, minRect, innerPercent=0.5): # объект частично внутри прямоугольника bigLUy, bigLUx,", "Не уверен в ифах if bigLUy < minLUy: minLUy =", "isCompletelyInside(bigRect, minRect): # объект полностью внутри прямоугольника y1, x1, y2,", "= patches.Rectangle(*bigRect.getMTparam(), linewidth=1, edgecolor='g', facecolor='None') ax.add_patch(rect) for i in minRects:", "2 координаты нижнего левого угла, потом верхнего правого\") self.LDx, self.LDy,", "minLUx, minRDy, minRDx = minRect fullSquare = (minLUy - minRDy)", "большого прямоугольника def isPartiallyInside(bigRect, minRect, innerPercent=0.5): # объект частично внутри", "бы одна точка лежит внутри minY, minX, maxY, maxX =", "return self.RUy - self.LDy def getLUx(self): return self.LDx def getLUy(self):", "c and d # если тру, то объект полностью внутри", "= bigRect minX = x1 minY = y1 # вроде", "self.LDy, self.RUx, self.RUy = coordinates def getWidth(self): return self.RUx -", "ax.add_patch(rect) temp = tempfile.NamedTemporaryFile() path = os.path.join(os.getcwd(), temp.name) plt.savefig(path) return", "maxX = x2 maxY = y2 y1, x1, y2, x2", "потом верхнего правого\") self.LDx, self.LDy, self.RUx, self.RUy = coordinates def", "self.LDx, self.LDy, self.RUx, self.RUy = coordinates def getWidth(self): return self.RUx", "minX, maxY, maxX = bigRect y1, x1, y2, x2 =", "хотя бы одна точка лежит внутри minY, minX, maxY, maxX", "list, objectsListRect: list): import matplotlib.pyplot as plt from PIL import", "объект полностью внутри прямоугольника y1, x1, y2, x2 
= bigRect", "in minRects: rect = patches.Rectangle(*i.getMTparam(), linewidth=1, edgecolor='r', facecolor='none') ax.add_patch(rect) temp", "plt from PIL import Image import numpy as np import", "searchRect: list, objectsListRect: list): import matplotlib.pyplot as plt from PIL", "чем должно быть? что ха дринся def getCenterOfDown(self): return [(self.LDx", "bigRDy if bigLUx > minLUx: minLUx = bigLUx if bigRDx", "координаты нижнего левого угла, потом верхнего правого\") self.LDx, self.LDy, self.RUx,", "+ \".png\") class Rectangle: LDx = 0 LDy = 0", "быть? что ха дринся def getCenterOfDown(self): return [(self.LDx + self.RUx)", "y1, x1, y2, x2 = bigRect minX = x1 minY", "maxY) d = (minX <= x2 <= maxX) return a", "<= maxX) c = (minY <= y2 <= maxY) d", "<= y2 <= maxY) d = (minX <= x2 <=", "bigRect minX = x1 minY = y1 # вроде верно", "\".png\") class Rectangle: LDx = 0 LDy = 0 RUx", "= (minY <= y2 <= maxY) d = (minX <=", "minRDy = bigRDy if bigLUx > minLUx: minLUx = bigLUx", "ха дринся def getCenterOfDown(self): return [(self.LDx + self.RUx) / 2,", "подаются 2 координаты нижнего левого угла, потом верхнего правого\") self.LDx,", "4: raise ValueError(\"Нужно подавать координаты(х,у) двух противоложных вершин\") if coordinates[0]", "RUy = 0 def __init__(self, coordinates: list): if len(coordinates) !=", "import tempfile def hasOnePointInside(bigRect, minRect): # хотя бы одна точка", "fullSquare >= innerPercent def createGraphic(imagePath: str, searchRect: list, objectsListRect: list):", "import Image import numpy as np import matplotlib.patches as patches", "patches.Rectangle(*i.getMTparam(), linewidth=1, edgecolor='r', facecolor='none') ax.add_patch(rect) temp = tempfile.NamedTemporaryFile() path =", "правого\") self.LDx, self.LDy, self.RUx, self.RUy = coordinates def getWidth(self): return", "x2 maxY = y2 y1, x1, y2, x2 = minRect", "противоложных вершин\") if coordinates[0] >= coordinates[2] or coordinates[1] >= coordinates[3]:", "bigRDy < minRDy: minRDy = bigRDy if 
bigLUx > minLUx:", "* (minRDx - minLUx) # не уверен что правильно #", "# если тру, то объект полностью внутри большого прямоугольника def", "innerPercent def createGraphic(imagePath: str, searchRect: list, objectsListRect: list): import matplotlib.pyplot", "and c and d # если тру, то объект полностью", "def getLUy(self): return self.RUy def getMTparam(self): return ((self.getLUy(), self.getLUx()), #", "getMTparam(self): return ((self.getLUy(), self.getLUx()), # почему -? я не знаю", "= (minLUy - minRDy) * (minRDx - minLUx) return inObjSquare", "maxY = y2 y1, x1, y2, x2 = minRect a", "c = (minY <= y2 <= maxY) d = (minX", "y1 # вроде верно maxX = x2 maxY = y2", "= (minX <= x2 <= maxX) return a and b", "в ифах if bigLUy < minLUy: minLUy = bigLUy if", "ax.imshow(im) bigRect = Rectangle(searchRect) minRects = [Rectangle(i) for i in", "import matplotlib.patches as patches im = np.array(Image.open(imagePath), dtype=np.uint8) fig, ax", ">= coordinates[3]: raise ValueError( \"Неверно заданы вершины, сначала подаются 2", "не знаю -self.getHeight(), self.getWidth()) # все абсолютно в другом порядке,", "return self.RUx - self.LDx def getHeight(self): return self.RUy - self.LDy", "fig, ax = plt.subplots(1) ax.imshow(im) bigRect = Rectangle(searchRect) minRects =", "fullSquare = (minLUy - minRDy) * (minRDx - minLUx) #", "return os.path.split(temp.name + \".png\") class Rectangle: LDx = 0 LDy", "# Не уверен в ифах if bigLUy < minLUy: minLUy", "inObjSquare = (minLUy - minRDy) * (minRDx - minLUx) return", "minRDy: minRDy = bigRDy if bigLUx > minLUx: minLUx =", "лежит внутри minY, minX, maxY, maxX = bigRect y1, x1,", "getWidth(self): return self.RUx - self.LDx def getHeight(self): return self.RUy -", "- self.LDy def getLUx(self): return self.LDx def getLUy(self): return self.RUy", "matplotlib.patches as patches im = np.array(Image.open(imagePath), dtype=np.uint8) fig, ax =", "minRect): # объект полностью внутри прямоугольника y1, x1, y2, x2", "уверен в ифах if bigLUy < minLUy: 
minLUy = bigLUy", "= (minX <= x2 <= maxX) return a or b", "x1 <= maxX) c = (minY <= y2 <= maxY)", "minRDy, minRDx = minRect fullSquare = (minLUy - minRDy) *", "a and b and c and d # если тру,", "прямоугольника bigLUy, bigLUx, bigRDy, bigRDx = bigRect minLUy, minLUx, minRDy,", "заданы вершины, сначала подаются 2 координаты нижнего левого угла, потом", "maxY) b = (minX <= x1 <= maxX) c =", "верно maxX = x2 maxY = y2 y1, x1, y2,", "facecolor='none') ax.add_patch(rect) temp = tempfile.NamedTemporaryFile() path = os.path.join(os.getcwd(), temp.name) plt.savefig(path)", "вершины, сначала подаются 2 координаты нижнего левого угла, потом верхнего", "# хотя бы одна точка лежит внутри minY, minX, maxY,", "Rectangle: LDx = 0 LDy = 0 RUx = 0", "= minRect a = (minY <= y1 <= maxY) b", "in objectsListRect] rect = patches.Rectangle(*bigRect.getMTparam(), linewidth=1, edgecolor='g', facecolor='None') ax.add_patch(rect) for", "# объект полностью внутри прямоугольника y1, x1, y2, x2 =", "bigRect y1, x1, y2, x2 = minRect a = (minY", "edgecolor='r', facecolor='none') ax.add_patch(rect) temp = tempfile.NamedTemporaryFile() path = os.path.join(os.getcwd(), temp.name)", "= minRect fullSquare = (minLUy - minRDy) * (minRDx -", "if bigLUx > minLUx: minLUx = bigLUx if bigRDx >", "return a and b and c and d # если", "bigRDx > minRDx: minRDx = bigRDx inObjSquare = (minLUy -", "linewidth=1, edgecolor='g', facecolor='None') ax.add_patch(rect) for i in minRects: rect =", "maxY, maxX = bigRect y1, x1, y2, x2 = minRect", "matplotlib.pyplot as plt from PIL import Image import numpy as", "if bigLUy < minLUy: minLUy = bigLUy if bigRDy <", "почему -? 
я не знаю -self.getHeight(), self.getWidth()) # все абсолютно", "coordinates[3]: raise ValueError( \"Неверно заданы вершины, сначала подаются 2 координаты", "!= 4: raise ValueError(\"Нужно подавать координаты(х,у) двух противоложных вершин\") if", "path = os.path.join(os.getcwd(), temp.name) plt.savefig(path) return os.path.split(temp.name + \".png\") class", "y2 y1, x1, y2, x2 = minRect a = (minY", "class Rectangle: LDx = 0 LDy = 0 RUx =", "patches.Rectangle(*bigRect.getMTparam(), linewidth=1, edgecolor='g', facecolor='None') ax.add_patch(rect) for i in minRects: rect", "b or c or d def isCompletelyInside(bigRect, minRect): # объект", "LDx = 0 LDy = 0 RUx = 0 RUy", "вершин\") if coordinates[0] >= coordinates[2] or coordinates[1] >= coordinates[3]: raise", "for i in minRects: rect = patches.Rectangle(*i.getMTparam(), linewidth=1, edgecolor='r', facecolor='none')", "minRDx = minRect fullSquare = (minLUy - minRDy) * (minRDx", "\"Неверно заданы вершины, сначала подаются 2 координаты нижнего левого угла,", "list): if len(coordinates) != 4: raise ValueError(\"Нужно подавать координаты(х,у) двух", "другом порядке, чем должно быть? 
что ха дринся def getCenterOfDown(self):", "d # если тру, то объект полностью внутри большого прямоугольника", "y2, x2 = minRect a = (minY <= y1 <=", "сначала подаются 2 координаты нижнего левого угла, потом верхнего правого\")", "def isPartiallyInside(bigRect, minRect, innerPercent=0.5): # объект частично внутри прямоугольника bigLUy,", "np import matplotlib.patches as patches im = np.array(Image.open(imagePath), dtype=np.uint8) fig,", "(minRDx - minLUx) return inObjSquare / fullSquare >= innerPercent def", "def createGraphic(imagePath: str, searchRect: list, objectsListRect: list): import matplotlib.pyplot as", "minLUx) # не уверен что правильно # Не уверен в", "self.LDx def getHeight(self): return self.RUy - self.LDy def getLUx(self): return", "не уверен что правильно # Не уверен в ифах if", "im = np.array(Image.open(imagePath), dtype=np.uint8) fig, ax = plt.subplots(1) ax.imshow(im) bigRect", "<= maxX) return a and b and c and d", "def __init__(self, coordinates: list): if len(coordinates) != 4: raise ValueError(\"Нужно", "minLUy: minLUy = bigLUy if bigRDy < minRDy: minRDy =", "or c or d def isCompletelyInside(bigRect, minRect): # объект полностью", "внутри большого прямоугольника def isPartiallyInside(bigRect, minRect, innerPercent=0.5): # объект частично", "прямоугольника def isPartiallyInside(bigRect, minRect, innerPercent=0.5): # объект частично внутри прямоугольника", "edgecolor='g', facecolor='None') ax.add_patch(rect) for i in minRects: rect = patches.Rectangle(*i.getMTparam(),", "(minY <= y1 <= maxY) b = (minX <= x1", "b = (minX <= x1 <= maxX) c = (minY", "= 0 def __init__(self, coordinates: list): if len(coordinates) != 4:", "minRects: rect = patches.Rectangle(*i.getMTparam(), linewidth=1, edgecolor='r', facecolor='none') ax.add_patch(rect) temp =", "rect = patches.Rectangle(*bigRect.getMTparam(), linewidth=1, edgecolor='g', facecolor='None') ax.add_patch(rect) for i in", "import matplotlib.pyplot as plt from PIL import Image import numpy", "self.RUy 
def getMTparam(self): return ((self.getLUy(), self.getLUx()), # почему -? я", "def isCompletelyInside(bigRect, minRect): # объект полностью внутри прямоугольника y1, x1,", "y1 <= maxY) b = (minX <= x1 <= maxX)", "= x2 maxY = y2 y1, x1, y2, x2 =", "внутри прямоугольника y1, x1, y2, x2 = bigRect minX =", "-self.getHeight(), self.getWidth()) # все абсолютно в другом порядке, чем должно", "createGraphic(imagePath: str, searchRect: list, objectsListRect: list): import matplotlib.pyplot as plt", "x1, y2, x2 = minRect a = (minY <= y1", "self.LDy def getLUx(self): return self.LDx def getLUy(self): return self.RUy def", "вроде верно maxX = x2 maxY = y2 y1, x1,", "coordinates: list): if len(coordinates) != 4: raise ValueError(\"Нужно подавать координаты(х,у)", "< minRDy: minRDy = bigRDy if bigLUx > minLUx: minLUx", "def getWidth(self): return self.RUx - self.LDx def getHeight(self): return self.RUy", "facecolor='None') ax.add_patch(rect) for i in minRects: rect = patches.Rectangle(*i.getMTparam(), linewidth=1,", "return self.LDx def getLUy(self): return self.RUy def getMTparam(self): return ((self.getLUy(),", "= plt.subplots(1) ax.imshow(im) bigRect = Rectangle(searchRect) minRects = [Rectangle(i) for", "os.path.join(os.getcwd(), temp.name) plt.savefig(path) return os.path.split(temp.name + \".png\") class Rectangle: LDx", "as patches im = np.array(Image.open(imagePath), dtype=np.uint8) fig, ax = plt.subplots(1)", "if len(coordinates) != 4: raise ValueError(\"Нужно подавать координаты(х,у) двух противоложных", "self.getLUx()), # почему -? я не знаю -self.getHeight(), self.getWidth()) #", "0 RUx = 0 RUy = 0 def __init__(self, coordinates:", "0 LDy = 0 RUx = 0 RUy = 0", "= bigLUx if bigRDx > minRDx: minRDx = bigRDx inObjSquare", "d = (minX <= x2 <= maxX) return a or", "coordinates[2] or coordinates[1] >= coordinates[3]: raise ValueError( \"Неверно заданы вершины," ]
[ "not, raise an error raise GraphicsError(f\"\\n\\nGraphicsError: The rectangle outline must", "If not, raise an error raise GraphicsError(f\"\\n\\nGraphicsError: The Rectangle fill", "# Checking if p1's x value is greater than p2's.", "self.height # abs(p2[0] - p1[0]) is not required because the", "is always greater than or equal to the p1 value", "value is greater than p2's. If so, then swap the", "False and left is False and right is False: if", "self in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.remove(self) elif self not in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.add(self)", "def get_outline_width(self): return self.outline_width def get_anchor(self): return self.anchor def set_dimensions(self,", "x value is greater than p2's. If so, then swap", "return self.outline def get_outline_width(self): return self.outline_width def get_anchor(self): return self.anchor", "> self.p2[1]: # Checking if p1's y value is greater", "self.p1[1] += dy self.p2[0] += dx self.p2[1] += dy self.anchor[0]", "{\"center\", \"top\", \"bottom\"}: raise GraphicsError( \"\\n\\nThe center argument for resizing", "Right now, min_width={min_width} & min_height={min_height}\") self.min_width = min_width self.min_height =", "f'{[\"center\", \"right\", \"left\"]}') if center == \"left\": self.set_coords(self.p1, self.p2.add_x(width -", "// 2, (self.p1[1] + self.p2[1]) // 2] GraphicsObject.__init__(self, options=(), cursor=cursor,", "2, self.p2[1]] def get_left(self): return [self.p1[0], (self.p1[1] + self.p2[1]) /", "self.p1[0] > mouse_pos[0] > self.p2[0]) and \\ (self.p1[1] < mouse_pos[1]", "= self.p2[0] - self.p1[0] self.height = self.p2[1] - self.p1[1] width_scale", "2 - self.width)), self.p2.add_x(width / 2 - self.width)) return self", "False else: return self.bounds.is_clicked(mouse_pos) def get_p1(self): return self.p1.copy() def get_p2(self):", "width must be an integer, not {outline_width}\") self._update_layer() return self", "def 
get_top(self): return [(self.p2[0] + self.p1[0]) / 2, self.p1[1]] def", "set_fill(self, fill): if fill is None: self.fill = STYLES[\"default\"][\"fill\"] elif", "self.width def get_height(self): return self.height def get_fill(self): return self.fill def", "is False and bottom is False and left is False", "If so, then swap the values self.p1[0], self.p2[0] = self.p2[0],", "(p2[0] - p1[0]) / self.width height_scale = (p2[1] - p1[1])", "self.width)), self.p2.add_x(width / 2 - self.width)) return self def set_height(self,", "self.fill = fill else: # If not, raise an error", "False and bottom is False and left is False and", "so, then swap the values self.p1[1], self.p2[1] = self.p2[1], self.p1[1]", "raise GraphicsError( \"\\n\\nThe center argument for resizing the object (set_outline_width)", "self.p2[1]] def get_left(self): return [self.p1[0], (self.p1[1] + self.p2[1]) / 2]", "\"bottom\": self.set_coords(self.p1.add_y(-(height - self.height)), self.p2) else: self.set_coords(self.p1.add_y(-(height / 2 -", "min_width=40, min_height=40, bounds_width=10, top_bounds=None, bottom_bounds=None, left_bounds=None, right_bounds=None): if min_width <", "\" f\"or equal to 1. Right now, min_width={min_width} & min_height={min_height}\")", "subclasses\"\"\" pass def _move(self, dx, dy): self.p1[0] += dx self.p1[1]", "[self.p2[0], (self.p1[1] + self.p2[1]) / 2] def get_width(self): return self.width", "self.width)) return self def set_height(self, height, center=\"center\"): if center not", "outline_width else: # If not, raise an error raise GraphicsError(", "a degenerate case. 
resizing_objects = [] def __init__(self, p1, p2,", "get_outline_width(self): return self.outline_width def get_anchor(self): return self.anchor def set_dimensions(self, width,", "not required because the p2 value is always greater than", "\\ (self.p1[1] < mouse_pos[1] < self.p2[1] or self.p1[1] > mouse_pos[1]", "the p2 value is always greater than or equal to", "in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.add(self) self.bounds_thickness = bounds_width return self def set_coords(self,", "bounds_width return self def set_coords(self, p1, p2): self.p1 = p1.copy()", "than or equal to the p1 value self.width = self.p2[0]", "value is always greater than or equal to the p1", "+= dy self.p2[0] += dx self.p2[1] += dy self.anchor[0] +=", "= self.p2[1], self.p1[1] # abs(p2[0] - p1[0]) is not required", "class for objects represented by bounding box # (opposite corners)", "greater than that of p1 if self.p1[0] > self.p2[0]: #", "option is an integer self.outline_width = outline_width else: # If", "get_p1(self): return self.p1.copy() def get_p2(self): return self.p2.copy() def get_top_right(self): return", "top_bounds=top_bounds, bottom_bounds=bottom_bounds, left_bounds=left_bounds, right_bounds=right_bounds, thickness=bounds_width) if top is False and", "one of \" f'{[\"center\", \"right\", \"left\"]}') if center == \"left\":", "2 - self.height)), self.p2.add_y(height / 2 - self.height)) return self", "value of p2 is greater than that of p1 if", "self.min_height = None self.max_width = None self.max_height = None self.resizing_bounds", "get_p2(self): return self.p2.copy() def get_top_right(self): return self.p1.copy() def get_top_left(self): return", "= None self.resizing_bounds = {} self.is_resizing = {} self.bounds_thickness =", "> mouse_pos[0] > self.p2[0]) and \\ (self.p1[1] < mouse_pos[1] <", ", not {fill}\") if outline is None: self.outline = STYLES[\"default\"][\"outline\"]", "def get_top_left(self): return [self.p2[0], self.p1[1]] def 
get_bottom_left(self): return [self.p1[0], self.p2[1]]", "& y value of p2 is greater than that of", "must be greater than \" f\"or equal to 1. Right", "self.max_width = None self.max_height = None self.resizing_bounds = {} self.is_resizing", "# If not, raise an error raise GraphicsError(f\"\\n\\nGraphicsError: The rectangle", "one of \" f'{[\"center\", \"top\", \"bottom\"]}') if center == \"top\":", "is an integer self.outline_width = outline_width else: # If not,", "< 1 or min_height < 1: raise GraphicsError(f\"\\n\\nGraphicsError: Minimum height", "be a Colour object , not {fill}\") if outline is", "right], top_bounds=top_bounds, bottom_bounds=bottom_bounds, left_bounds=left_bounds, right_bounds=right_bounds, thickness=bounds_width) if top is False", "bottom, left, right], top_bounds=top_bounds, bottom_bounds=bottom_bounds, left_bounds=left_bounds, right_bounds=right_bounds, thickness=bounds_width) if top", "not {outline_width}\") def __repr__(self): return \"_BBox\" def _set_resizable(self, resizables, top_bounds=None,", "Minimum height and width of resizable object must be greater", "be a Colour object , not {outline}\") if outline_width is", "self.anchor[0] += dx self.anchor[1] += dy def is_clicked(self, mouse_pos): if", "f\"or equal to 1. 
Right now, min_width={min_width} & min_height={min_height}\") self.min_width", "def set_fill(self, fill): if fill is None: self.fill = STYLES[\"default\"][\"fill\"]", "- self.p1[0] self.height = self.p2[1] - self.p1[1] width_scale = (p2[0]", "# Checking if the option is an integer self.outline_width =", "self.p1[0]) / 2, self.p1[1]] def get_bottom(self): return [(self.p2[0] + self.p1[0])", "right=False, min_width=40, min_height=40, bounds_width=10, top_bounds=None, bottom_bounds=None, left_bounds=None, right_bounds=None): if min_width", "option is a colour self.fill = fill else: # If", "self.p2[0] = self.p2[0], self.p1[0] if self.p1[1] > self.p2[1]: # Checking", "the p1 value self.width = p2[0] - p1[0] self.height =", "# abs(p2[0] - p1[0]) is not required because the p2", "self.p1.copy() def get_p2(self): return self.p2.copy() def get_top_right(self): return self.p1.copy() def", "p1[0] self.height = p2[1] - p1[1] self.anchor = [(self.p1[0] +", "/ 2 - self.width)) return self def set_height(self, height, center=\"center\"):", "height, center=\"center\"): if center not in {\"center\", \"top\", \"bottom\"}: raise", "because the p2 value is always greater than or equal", "elif center == \"bottom\": self.set_coords(self.p1.add_y(-(height - self.height)), self.p2) else: self.set_coords(self.p1.add_y(-(height", "= None self.min_height = None self.max_width = None self.max_height =", "p2 # These make sure that the p2 is 'after'", "is None: self.outline = STYLES[\"default\"][\"outline\"] elif isinstance(outline, Colour): # Checking", "for objects represented by bounding box # (opposite corners) Line", "return self def set_width(self, width, center=\"center\"): if center not in", "(self.p1[1] + self.p2[1]) / 2] def get_right(self): return [self.p2[0], (self.p1[1]", ", not {outline}\") if outline_width is None: self.outline_width = STYLES[\"default\"][\"width\"]", "= p2.copy() # These make sure that the p2 is", "- self.width)), self.p2.add_x(width / 2 - self.width)) return 
self def", "- self.height)), self.p2) else: self.set_coords(self.p1.add_y(-(height / 2 - self.height)), self.p2.add_y(height", "then swap the values self.p1[1], self.p2[1] = self.p2[1], self.p1[1] #", "self.max_height = None self.resizing_bounds = {} self.is_resizing = {} self.bounds_thickness", "None: self.fill = STYLES[\"default\"][\"fill\"] elif isinstance(fill, Colour): # Checking if", "left, right], top_bounds=top_bounds, bottom_bounds=bottom_bounds, left_bounds=left_bounds, right_bounds=right_bounds, thickness=bounds_width) if top is", "an error raise GraphicsError( f\"\\n\\nGraphicsError: The rectangle outline width must", "(p2[1] - p1[1]) / self.height # abs(p2[0] - p1[0]) is", "/ 2 - self.width)), self.p2.add_x(width / 2 - self.width)) return", "- self.height)), self.p2.add_y(height / 2 - self.height)) return self def", "'after' p1, ie the x & y value of p2", "fill must be a Colour object , not {fill}\") self._update_layer()", "a colour self.outline = outline else: # If not, raise", "thickness=10): \"\"\"Override in subclasses\"\"\" pass def _move(self, dx, dy): self.p1[0]", "- p1[0]) is not required because the p2 value is", "[self.p1[0], (self.p1[1] + self.p2[1]) / 2] def get_right(self): return [self.p2[0],", "self.outline = STYLES[\"default\"][\"outline\"] elif isinstance(outline, Colour): # Checking if the", "return False else: if (self.p1[0] < mouse_pos[0] < self.p2[0] or", "self.p2[0] or self.p1[0] > mouse_pos[0] > self.p2[0]) and \\ (self.p1[1]", "now, min_width={min_width} & min_height={min_height}\") self.min_width = min_width self.min_height = min_height", "2] def get_width(self): return self.width def get_height(self): return self.height def", "import GraphicsObject from goopylib.styles import * class BBox(GraphicsObject): # Internal", "\" f'{[\"center\", \"right\", \"left\"]}') if center == \"left\": self.set_coords(self.p1, self.p2.add_x(width", "self.set_coords(self.p1.add_x(-(width - self.width)), self.p2) else: 
self.set_coords(self.p1.add_x(-(width / 2 - self.width)),", "tag=tag) # abs(p2[0] - p1[0]) is not required because the", "{\"center\", \"right\", \"left\"}: raise GraphicsError( \"\\n\\nThe center argument for resizing", "int): # Checking if the option is an integer self.outline_width", "self.set_coords(self.p1, self.p2.add_y(height - self.height)) elif center == \"bottom\": self.set_coords(self.p1.add_y(-(height -", "GraphicsObject.__init__(self, options=(), cursor=cursor, layer=layer, bounds=bounds, tag=tag) # abs(p2[0] - p1[0])", "is None: return False else: if (self.p1[0] < mouse_pos[0] <", "self.outline def get_outline_width(self): return self.outline_width def get_anchor(self): return self.anchor def", "\"right\", \"left\"]}') if center == \"left\": self.set_coords(self.p1, self.p2.add_x(width - self.width))", "if self.p1[1] > self.p2[1]: # Checking if p1's y value", "self.height)), self.p2) else: self.set_coords(self.p1.add_y(-(height / 2 - self.height)), self.p2.add_y(height /", "self.fill = STYLES[\"default\"][\"fill\"] elif isinstance(fill, Colour): # Checking if the", "(opposite corners) Line segment is a degenerate case. 
resizing_objects =", "by bounding box # (opposite corners) Line segment is a", "outline=None, outline_width=None, cursor=\"arrow\", layer=0, tag=None): self.p1 = p1 self.p2 =", "return self.p2.copy() def get_top(self): return [(self.p2[0] + self.p1[0]) / 2,", "must be a Colour object , not {outline}\") self._update_layer() return", "+ self.p1[0]) / 2, self.p2[1]] def get_left(self): return [self.p1[0], (self.p1[1]", "_set_resizable(self, resizables, top_bounds=None, bottom_bounds=None, left_bounds=None, right_bounds=None, thickness=10): \"\"\"Override in subclasses\"\"\"", "self.p1[0] > self.p2[0]: # Checking if p1's x value is", "must be a Colour object , not {fill}\") if outline", "left_bounds=None, right_bounds=None): if min_width < 1 or min_height < 1:", "bounds=None, fill=None, outline=None, outline_width=None, cursor=\"arrow\", layer=0, tag=None): self.p1 = p1", "self._set_resizable([top, bottom, left, right], top_bounds=top_bounds, bottom_bounds=bottom_bounds, left_bounds=left_bounds, right_bounds=right_bounds, thickness=bounds_width) if", "not in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.add(self) self.bounds_thickness = bounds_width return self def", "- self.p1[0] self.height = self.p2[1] - self.p1[1] self.min_width = None", "that of p1 if self.p1[0] > self.p2[0]: # Checking if", "error raise GraphicsError(f\"\\n\\nGraphicsError: The Rectangle fill must be a Colour", "[] def __init__(self, p1, p2, bounds=None, fill=None, outline=None, outline_width=None, cursor=\"arrow\",", "self.p1[0], self.p2[0] = self.p2[0], self.p1[0] if self.p1[1] > self.p2[1]: #", "if the option is a colour self.fill = fill else:", "from goopylib.objects.GraphicsObject import GraphicsObject from goopylib.styles import * class BBox(GraphicsObject):", "y value of p2 is greater than that of p1", "\"left\": left, \"bottom\": bottom, \"right\": right} self._set_resizable([top, bottom, left, right],", "= (p2[1] - p1[1]) / self.height # abs(p2[0] - p1[0])", "object , 
not {outline}\") self._update_layer() return self def set_outline_width(self, outline_width):", "of p2 is greater than that of p1 if self.p1[0]", "self.height)) elif center == \"bottom\": self.set_coords(self.p1.add_y(-(height - self.height)), self.p2) else:", "return self def set_outline(self, outline): if outline is None: self.outline", "get_bottom_left(self): return [self.p1[0], self.p2[1]] def get_bottom_right(self): return self.p2.copy() def get_top(self):", "or equal to the p1 value self.width = self.p2[0] -", "Colour object , not {outline}\") if outline_width is None: self.outline_width", "STYLES[\"default\"][\"width\"] elif isinstance(outline_width, int): # Checking if the option is", "return self def set_coords(self, p1, p2): self.p1 = p1.copy() self.p2", "\"\\n\\nThe center argument for resizing the object (set_height) needs to", "the object (set_height) needs to be one of \" f'{[\"center\",", "GraphicsError(f\"\\n\\nGraphicsError: The rectangle outline width must be an integer, not", "else: return False else: return self.bounds.is_clicked(mouse_pos) def get_p1(self): return self.p1.copy()", "self def set_outline(self, outline): if outline is None: self.outline =", "resizable object must be greater than \" f\"or equal to", "Colour): # Checking if the option is a colour self.fill", "Colour object , not {fill}\") self._update_layer() return self def set_outline(self,", "error raise GraphicsError(f\"\\n\\nGraphicsError: The rectangle outline must be a Colour", "return self.p1.copy() def get_top_left(self): return [self.p2[0], self.p1[1]] def get_bottom_left(self): return", "raise GraphicsError( f\"\\n\\nGraphicsError: The rectangle outline width must be an", "__repr__(self): return \"_BBox\" def _set_resizable(self, resizables, top_bounds=None, bottom_bounds=None, left_bounds=None, right_bounds=None,", "self.is_resizing = {\"top\": top, \"left\": left, \"bottom\": bottom, \"right\": right}", "outline): if outline is None: self.outline = 
STYLES[\"default\"][\"outline\"] elif isinstance(outline,", "def set_outline(self, outline): if outline is None: self.outline = STYLES[\"default\"][\"outline\"]", "p2's. If so, then swap the values self.p1[0], self.p2[0] =", "self.p2[1] += dy self.anchor[0] += dx self.anchor[1] += dy def", "bottom_bounds=None, left_bounds=None, right_bounds=None, thickness=10): \"\"\"Override in subclasses\"\"\" pass def _move(self,", "self.p1[1] > self.p2[1]: # Checking if p1's y value is", "self.fill def get_outline(self): return self.outline def get_outline_width(self): return self.outline_width def", "object (set_outline_width) needs to be one of \" f'{[\"center\", \"right\",", "self def set_height(self, height, center=\"center\"): if center not in {\"center\",", "\"bottom\": bottom, \"right\": right} self._set_resizable([top, bottom, left, right], top_bounds=top_bounds, bottom_bounds=bottom_bounds,", "dx self.p1[1] += dy self.p2[0] += dx self.p2[1] += dy", "def get_top_right(self): return self.p1.copy() def get_top_left(self): return [self.p2[0], self.p1[1]] def", "self.p1.copy() def get_top_left(self): return [self.p2[0], self.p1[1]] def get_bottom_left(self): return [self.p1[0],", "self.p2[1] = self.p2[1], self.p1[1] self.anchor = [(self.p1[0] + self.p2[0]) //", "return self.anchor def set_dimensions(self, width, height, horizontal_align=\"center\", vertical_align=\"center\"): self.set_width(width, horizontal_align)", "min_height=40, bounds_width=10, top_bounds=None, bottom_bounds=None, left_bounds=None, right_bounds=None): if min_width < 1", "< 1: raise GraphicsError(f\"\\n\\nGraphicsError: Minimum height and width of resizable", "argument for resizing the object (set_outline_width) needs to be one", "dy def is_clicked(self, mouse_pos): if self.bounds is None: if mouse_pos", "raise GraphicsError( \"\\n\\nThe center argument for resizing the object (set_height)", "swap the values self.p1[0], self.p2[0] = self.p2[0], self.p1[0] if self.p1[1]", "{} self.is_resizing = {} 
self.bounds_thickness = 0 if fill is", "/ 2, self.p1[1]] def get_bottom(self): return [(self.p2[0] + self.p1[0]) /", "to 1. Right now, min_width={min_width} & min_height={min_height}\") self.min_width = min_width", "left_bounds=left_bounds, right_bounds=right_bounds, thickness=bounds_width) if top is False and bottom is", "{fill}\") self._update_layer() return self def set_outline(self, outline): if outline is", "self.p1[1] width_scale = (p2[0] - p1[0]) / self.width height_scale =", "horizontal_align=\"center\", vertical_align=\"center\"): self.set_width(width, horizontal_align) self.set_height(height, vertical_align) return self def set_resizable(self,", "pass def _move(self, dx, dy): self.p1[0] += dx self.p1[1] +=", "set_dimensions(self, width, height, horizontal_align=\"center\", vertical_align=\"center\"): self.set_width(width, horizontal_align) self.set_height(height, vertical_align) return", "the values self.p1[1], self.p2[1] = self.p2[1], self.p1[1] # abs(p2[0] -", "self.p2[0]: # Checking if p1's x value is greater than", "an integer self.outline_width = outline_width else: # If not, raise", "is_clicked(self, mouse_pos): if self.bounds is None: if mouse_pos is None:", "if min_width < 1 or min_height < 1: raise GraphicsError(f\"\\n\\nGraphicsError:", "get_outline(self): return self.outline def get_outline_width(self): return self.outline_width def get_anchor(self): return", "\"_BBox\" def _set_resizable(self, resizables, top_bounds=None, bottom_bounds=None, left_bounds=None, right_bounds=None, thickness=10): \"\"\"Override", "1: raise GraphicsError(f\"\\n\\nGraphicsError: Minimum height and width of resizable object", "p2[1] - p1[1] self.anchor = [(self.p1[0] + self.p2[0]) // 2,", "(self.p1[0] < mouse_pos[0] < self.p2[0] or self.p1[0] > mouse_pos[0] >", "p1[0]) is not required because the p2 value is always", "get_anchor(self): return self.anchor def set_dimensions(self, width, height, horizontal_align=\"center\", vertical_align=\"center\"): 
self.set_width(width,", "GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.add(self) self.bounds_thickness = bounds_width return self def set_coords(self, p1,", "in {\"center\", \"top\", \"bottom\"}: raise GraphicsError( \"\\n\\nThe center argument for", "outline_width=None, cursor=\"arrow\", layer=0, tag=None): self.p1 = p1 self.p2 = p2", "f\"\\n\\nGraphicsError: The rectangle outline width must be an integer, not", "self.p2 = p2.copy() # These make sure that the p2", "self.width = self.p2[0] - self.p1[0] self.height = self.p2[1] - self.p1[1]", "# If not, raise an error raise GraphicsError(f\"\\n\\nGraphicsError: The Rectangle", "dy self.p2[0] += dx self.p2[1] += dy self.anchor[0] += dx", "p1[0]) / self.width height_scale = (p2[1] - p1[1]) / self.height", "p2, bounds=None, fill=None, outline=None, outline_width=None, cursor=\"arrow\", layer=0, tag=None): self.p1 =", "self.p2[1]) / 2] def get_right(self): return [self.p2[0], (self.p1[1] + self.p2[1])", "= outline else: # If not, raise an error raise", "GraphicsError(f\"\\n\\nGraphicsError: Minimum height and width of resizable object must be", "= bounds_width return self def set_coords(self, p1, p2): self.p1 =", "equal to 1. Right now, min_width={min_width} & min_height={min_height}\") self.min_width =", "self.outline_width = outline_width else: # If not, raise an error", "than that of p1 if self.p1[0] > self.p2[0]: # Checking", "and bottom is False and left is False and right", "outline width must be an integer, not {outline_width}\") self._update_layer() return", "< self.p2[0] or self.p1[0] > mouse_pos[0] > self.p2[0]) and \\", "object must be greater than \" f\"or equal to 1.", "case. 
resizing_objects = [] def __init__(self, p1, p2, bounds=None, fill=None,", "swap the values self.p1[1], self.p2[1] = self.p2[1], self.p1[1] self.anchor =", "is None: self.fill = STYLES[\"default\"][\"fill\"] elif isinstance(fill, Colour): # Checking", "False: if self in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.remove(self) elif self not in", "self.bounds_thickness = 0 if fill is None: self.fill = STYLES[\"default\"][\"fill\"]", "min_width < 1 or min_height < 1: raise GraphicsError(f\"\\n\\nGraphicsError: Minimum", "return [self.p1[0], (self.p1[1] + self.p2[1]) / 2] def get_right(self): return", "self.bounds is None: if mouse_pos is None: return False else:", "self def set_outline_width(self, outline_width): if outline_width is None: self.outline_width =", "a colour self.fill = fill else: # If not, raise", "return [(self.p2[0] + self.p1[0]) / 2, self.p2[1]] def get_left(self): return", "+ self.p2[1]) / 2] def get_width(self): return self.width def get_height(self):", "from goopylib.styles import * class BBox(GraphicsObject): # Internal base class", "return self.bounds.is_clicked(mouse_pos) def get_p1(self): return self.p1.copy() def get_p2(self): return self.p2.copy()", "ie the x & y value of p2 is greater", "for resizing the object (set_height) needs to be one of", "return self def set_fill(self, fill): if fill is None: self.fill", "goopylib.styles import * class BBox(GraphicsObject): # Internal base class for", "- self.width)) return self def set_height(self, height, center=\"center\"): if center", "Colour object , not {fill}\") if outline is None: self.outline", "- self.p1[1] self.min_width = None self.min_height = None self.max_width =", "so, then swap the values self.p1[0], self.p2[0] = self.p2[0], self.p1[0]", "to be one of \" f'{[\"center\", \"top\", \"bottom\"]}') if center", "elif isinstance(fill, Colour): # Checking if the option is a", "be a Colour object , not {fill}\") self._update_layer() return self", 
"STYLES[\"default\"][\"fill\"] elif isinstance(fill, Colour): # Checking if the option is", "+= dy def is_clicked(self, mouse_pos): if self.bounds is None: if", "self.outline = outline else: # If not, raise an error", "objects represented by bounding box # (opposite corners) Line segment", "GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.remove(self) elif self not in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.add(self) self.bounds_thickness =", "GraphicsObject.resizing_objects.add(self) self.bounds_thickness = bounds_width return self def set_coords(self, p1, p2):", "set_coords(self, p1, p2): self.p1 = p1.copy() self.p2 = p2.copy() #", "// 2] self._update_layer() return self def set_width(self, width, center=\"center\"): if", "center argument for resizing the object (set_height) needs to be", "argument for resizing the object (set_height) needs to be one", "the option is an integer self.outline_width = outline_width else: #", "= [(self.p1[0] + self.p2[0]) // 2, (self.p1[1] + self.p2[1]) //", "options=(), cursor=cursor, layer=layer, bounds=bounds, tag=tag) # abs(p2[0] - p1[0]) is", "self.height def get_fill(self): return self.fill def get_outline(self): return self.outline def", "isinstance(outline, Colour): # Checking if the option is a colour", "self.set_coords(self.p1.add_y(-(height - self.height)), self.p2) else: self.set_coords(self.p1.add_y(-(height / 2 - self.height)),", "right is False: if self in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.remove(self) elif self", "p1 value self.width = p2[0] - p1[0] self.height = p2[1]", "center == \"top\": self.set_coords(self.p1, self.p2.add_y(height - self.height)) elif center ==", "[(self.p2[0] + self.p1[0]) / 2, self.p1[1]] def get_bottom(self): return [(self.p2[0]", "than p2's. 
If so, then swap the values self.p1[0], self.p2[0]", "get_bottom_right(self): return self.p2.copy() def get_top(self): return [(self.p2[0] + self.p1[0]) /", "= (p2[0] - p1[0]) / self.width height_scale = (p2[1] -", "2] def get_right(self): return [self.p2[0], (self.p1[1] + self.p2[1]) / 2]", "if self.bounds is None: if mouse_pos is None: return False", "{outline}\") self._update_layer() return self def set_outline_width(self, outline_width): if outline_width is", "= STYLES[\"default\"][\"outline\"] elif isinstance(outline, Colour): # Checking if the option", "== \"right\": self.set_coords(self.p1.add_x(-(width - self.width)), self.p2) else: self.set_coords(self.p1.add_x(-(width / 2", "- self.width)), self.p2) else: self.set_coords(self.p1.add_x(-(width / 2 - self.width)), self.p2.add_x(width", "error raise GraphicsError(f\"\\n\\nGraphicsError: The rectangle outline width must be an", "def get_p2(self): return self.p2.copy() def get_top_right(self): return self.p1.copy() def get_top_left(self):", "[(self.p2[0] + self.p1[0]) / 2, self.p2[1]] def get_left(self): return [self.p1[0],", "and right is False: if self in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.remove(self) elif", "p1 if self.p1[0] > self.p2[0]: # Checking if p1's x", "- p1[1]) / self.height # abs(p2[0] - p1[0]) is not", "def get_width(self): return self.width def get_height(self): return self.height def get_fill(self):", "{outline_width}\") def __repr__(self): return \"_BBox\" def _set_resizable(self, resizables, top_bounds=None, bottom_bounds=None,", "or equal to the p1 value self.width = p2[0] -", "set_height(self, height, center=\"center\"): if center not in {\"center\", \"top\", \"bottom\"}:", "fill must be a Colour object , not {fill}\") if", "get_left(self): return [self.p1[0], (self.p1[1] + self.p2[1]) / 2] def get_right(self):", "if mouse_pos is None: return False else: if (self.p1[0] <", "GraphicsError( f\"\\n\\nGraphicsError: The rectangle outline width must be an 
integer,", "object , not {fill}\") self._update_layer() return self def set_outline(self, outline):", "x & y value of p2 is greater than that", "+ self.p2[0]) // 2, (self.p1[1] + self.p2[1]) // 2] GraphicsObject.__init__(self,", "must be a Colour object , not {outline}\") if outline_width", "fill is None: self.fill = STYLES[\"default\"][\"fill\"] elif isinstance(fill, Colour): #", "return True else: return False else: return self.bounds.is_clicked(mouse_pos) def get_p1(self):", "be an integer, not {outline_width}\") def __repr__(self): return \"_BBox\" def", "or self.p1[0] > mouse_pos[0] > self.p2[0]) and \\ (self.p1[1] <", "(self.p1[1] + self.p2[1]) / 2] def get_width(self): return self.width def", "def is_clicked(self, mouse_pos): if self.bounds is None: if mouse_pos is", "is greater than that of p1 if self.p1[0] > self.p2[0]:", "/ self.width height_scale = (p2[1] - p1[1]) / self.height #", "if center == \"top\": self.set_coords(self.p1, self.p2.add_y(height - self.height)) elif center", "self.height = self.p2[1] - self.p1[1] self.min_width = None self.min_height =", "that the p2 is 'after' p1, ie the x &", "get_bottom(self): return [(self.p2[0] + self.p1[0]) / 2, self.p2[1]] def get_left(self):", "- self.height)) elif center == \"bottom\": self.set_coords(self.p1.add_y(-(height - self.height)), self.p2)", ", not {outline}\") self._update_layer() return self def set_outline_width(self, outline_width): if", "Checking if the option is an integer self.outline_width = outline_width", "the option is a colour self.outline = outline else: #", "degenerate case. 
resizing_objects = [] def __init__(self, p1, p2, bounds=None,", "# Checking if the option is a colour self.fill =", "right} self._set_resizable([top, bottom, left, right], top_bounds=top_bounds, bottom_bounds=bottom_bounds, left_bounds=left_bounds, right_bounds=right_bounds, thickness=bounds_width)", "p2.copy() # These make sure that the p2 is 'after'", "equal to the p1 value self.width = self.p2[0] - self.p1[0]", "return [self.p2[0], (self.p1[1] + self.p2[1]) / 2] def get_width(self): return", "sure that the p2 is 'after' p1, ie the x", "_move(self, dx, dy): self.p1[0] += dx self.p1[1] += dy self.p2[0]", "error raise GraphicsError( f\"\\n\\nGraphicsError: The rectangle outline width must be", "== \"top\": self.set_coords(self.p1, self.p2.add_y(height - self.height)) elif center == \"bottom\":", "None self.max_width = None self.max_height = None self.resizing_bounds = {}", "= {} self.is_resizing = {} self.bounds_thickness = 0 if fill", "self.width = p2[0] - p1[0] self.height = p2[1] - p1[1]", "of p1 if self.p1[0] > self.p2[0]: # Checking if p1's", "is not required because the p2 value is always greater", "- self.p1[1] width_scale = (p2[0] - p1[0]) / self.width height_scale", "be one of \" f'{[\"center\", \"right\", \"left\"]}') if center ==", "then swap the values self.p1[1], self.p2[1] = self.p2[1], self.p1[1] self.anchor", "else: # If not, raise an error raise GraphicsError( f\"\\n\\nGraphicsError:", "def get_bottom_right(self): return self.p2.copy() def get_top(self): return [(self.p2[0] + self.p1[0])", "= self.p2[1], self.p1[1] self.anchor = [(self.p1[0] + self.p2[0]) // 2,", "tag=None): self.p1 = p1 self.p2 = p2 # These make", "self.p2) else: self.set_coords(self.p1.add_y(-(height / 2 - self.height)), self.p2.add_y(height / 2", "// 2] GraphicsObject.__init__(self, options=(), cursor=cursor, layer=layer, bounds=bounds, tag=tag) # abs(p2[0]", "an error raise GraphicsError(f\"\\n\\nGraphicsError: The Rectangle fill must be a", "def __repr__(self): return 
\"_BBox\" def _set_resizable(self, resizables, top_bounds=None, bottom_bounds=None, left_bounds=None,", "width of resizable object must be greater than \" f\"or", "cursor=cursor, layer=layer, bounds=bounds, tag=tag) # abs(p2[0] - p1[0]) is not", "bottom_bounds=None, left_bounds=None, right_bounds=None): if min_width < 1 or min_height <", "# If not, raise an error raise GraphicsError( f\"\\n\\nGraphicsError: The", "self.p1[1] self.min_width = None self.min_height = None self.max_width = None", "\"\\n\\nThe center argument for resizing the object (set_outline_width) needs to", "Rectangle fill must be a Colour object , not {fill}\")", "False else: if (self.p1[0] < mouse_pos[0] < self.p2[0] or self.p1[0]", "make sure that the p2 is 'after' p1, ie the", "for resizing the object (set_outline_width) needs to be one of", "self.p1[1], self.p2[1] = self.p2[1], self.p1[1] self.anchor = [(self.p1[0] + self.p2[0])", "- self.height)) return self def set_fill(self, fill): if fill is", "2 - self.width)) return self def set_height(self, height, center=\"center\"): if", "raise an error raise GraphicsError(f\"\\n\\nGraphicsError: The Rectangle fill must be", "if self in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.remove(self) elif self not in GraphicsObject.resizing_objects:", "not {outline}\") if outline_width is None: self.outline_width = STYLES[\"default\"][\"width\"] elif", "mouse_pos[1] > self.p2[1]): return True else: return False else: return", "BBox(GraphicsObject): # Internal base class for objects represented by bounding", "than or equal to the p1 value self.width = p2[0]", "{fill}\") if outline is None: self.outline = STYLES[\"default\"][\"outline\"] elif isinstance(outline,", "must be a Colour object , not {fill}\") self._update_layer() return", "a Colour object , not {fill}\") self._update_layer() return self def", "else: if (self.p1[0] < mouse_pos[0] < self.p2[0] or self.p1[0] >", "return self.outline_width def get_anchor(self): return self.anchor 
def set_dimensions(self, width, height,", "if outline is None: self.outline = STYLES[\"default\"][\"outline\"] elif isinstance(outline, Colour):", "dx self.p2[1] += dy self.anchor[0] += dx self.anchor[1] += dy", "min_height self.is_resizing = {\"top\": top, \"left\": left, \"bottom\": bottom, \"right\":", "return self.p1.copy() def get_p2(self): return self.p2.copy() def get_top_right(self): return self.p1.copy()", "p1, p2): self.p1 = p1.copy() self.p2 = p2.copy() # These", "self.p2[0] - self.p1[0] self.height = self.p2[1] - self.p1[1] width_scale =", "= p2[0] - p1[0] self.height = p2[1] - p1[1] self.anchor", "box # (opposite corners) Line segment is a degenerate case.", "height and width of resizable object must be greater than", "self.p2) else: self.set_coords(self.p1.add_x(-(width / 2 - self.width)), self.p2.add_x(width / 2", "= fill else: # If not, raise an error raise", "if center not in {\"center\", \"right\", \"left\"}: raise GraphicsError( \"\\n\\nThe", "self.p2.add_x(width / 2 - self.width)) return self def set_height(self, height,", "mouse_pos[1] < self.p2[1] or self.p1[1] > mouse_pos[1] > self.p2[1]): return", "self def set_width(self, width, center=\"center\"): if center not in {\"center\",", "\"right\": right} self._set_resizable([top, bottom, left, right], top_bounds=top_bounds, bottom_bounds=bottom_bounds, left_bounds=left_bounds, right_bounds=right_bounds,", "Colour object , not {outline}\") self._update_layer() return self def set_outline_width(self,", "mouse_pos is None: return False else: if (self.p1[0] < mouse_pos[0]", "p1[1] self.anchor = [(self.p1[0] + self.p2[0]) // 2, (self.p1[1] +", "center argument for resizing the object (set_outline_width) needs to be", "right_bounds=None): if min_width < 1 or min_height < 1: raise", "= self.p2[1] - self.p1[1] self.min_width = None self.min_height = None", "p2 value is always greater than or equal to the", "(self.p1[1] + self.p2[1]) // 2] self._update_layer() return self def set_width(self,", "def 
set_coords(self, p1, p2): self.p1 = p1.copy() self.p2 = p2.copy()", "self.p1[1], self.p2[1] = self.p2[1], self.p1[1] # abs(p2[0] - p1[0]) is", "a Colour object , not {fill}\") if outline is None:", "top is False and bottom is False and left is", "/ self.height # abs(p2[0] - p1[0]) is not required because", "self.width)), self.p2) else: self.set_coords(self.p1.add_x(-(width / 2 - self.width)), self.p2.add_x(width /", "def set_resizable(self, top=False, left=False, bottom=False, right=False, min_width=40, min_height=40, bounds_width=10, top_bounds=None,", "dx self.anchor[1] += dy def is_clicked(self, mouse_pos): if self.bounds is", "self.p2[1] or self.p1[1] > mouse_pos[1] > self.p2[1]): return True else:", "Checking if p1's y value is greater than p2's. If", "the values self.p1[1], self.p2[1] = self.p2[1], self.p1[1] self.anchor = [(self.p1[0]", ", not {fill}\") self._update_layer() return self def set_outline(self, outline): if", "right_bounds=right_bounds, thickness=bounds_width) if top is False and bottom is False", "= p2 # These make sure that the p2 is", "base class for objects represented by bounding box # (opposite", "outline_width is None: self.outline_width = STYLES[\"default\"][\"width\"] elif isinstance(outline_width, int): #", "not, raise an error raise GraphicsError(f\"\\n\\nGraphicsError: The Rectangle fill must", "def set_dimensions(self, width, height, horizontal_align=\"center\", vertical_align=\"center\"): self.set_width(width, horizontal_align) self.set_height(height, vertical_align)", "None: if mouse_pos is None: return False else: if (self.p1[0]", "center not in {\"center\", \"top\", \"bottom\"}: raise GraphicsError( \"\\n\\nThe center", "self.outline_width def get_anchor(self): return self.anchor def set_dimensions(self, width, height, horizontal_align=\"center\",", "f'{[\"center\", \"top\", \"bottom\"]}') if center == \"top\": self.set_coords(self.p1, self.p2.add_y(height -", "raise GraphicsError(f\"\\n\\nGraphicsError: Minimum height and width 
of resizable object must", "be a Colour object , not {outline}\") self._update_layer() return self", "bounds=bounds, tag=tag) # abs(p2[0] - p1[0]) is not required because", "self.set_height(height, vertical_align) return self def set_resizable(self, top=False, left=False, bottom=False, right=False,", "greater than \" f\"or equal to 1. Right now, min_width={min_width}", "p1 value self.width = self.p2[0] - self.p1[0] self.height = self.p2[1]", "GraphicsError(f\"\\n\\nGraphicsError: The Rectangle fill must be a Colour object ,", "p2[0] - p1[0] self.height = p2[1] - p1[1] self.anchor =", "self.p1[1]] def get_bottom_left(self): return [self.p1[0], self.p2[1]] def get_bottom_right(self): return self.p2.copy()", "2 - self.height)) return self def set_fill(self, fill): if fill", "\"bottom\"]}') if center == \"top\": self.set_coords(self.p1, self.p2.add_y(height - self.height)) elif", "greater than p2's. If so, then swap the values self.p1[0],", "colour self.fill = fill else: # If not, raise an", "__init__(self, p1, p2, bounds=None, fill=None, outline=None, outline_width=None, cursor=\"arrow\", layer=0, tag=None):", "== \"left\": self.set_coords(self.p1, self.p2.add_x(width - self.width)) elif center == \"right\":", "# (opposite corners) Line segment is a degenerate case. 
resizing_objects", "bottom_bounds=bottom_bounds, left_bounds=left_bounds, right_bounds=right_bounds, thickness=bounds_width) if top is False and bottom", "fill): if fill is None: self.fill = STYLES[\"default\"][\"fill\"] elif isinstance(fill,", "If not, raise an error raise GraphicsError( f\"\\n\\nGraphicsError: The rectangle", "self._update_layer() return self def set_width(self, width, center=\"center\"): if center not", "import * class BBox(GraphicsObject): # Internal base class for objects", "to the p1 value self.width = self.p2[0] - self.p1[0] self.height", "> mouse_pos[1] > self.p2[1]): return True else: return False else:", "= STYLES[\"default\"][\"width\"] elif isinstance(outline_width, int): # Checking if the option", "dx, dy): self.p1[0] += dx self.p1[1] += dy self.p2[0] +=", "min_width={min_width} & min_height={min_height}\") self.min_width = min_width self.min_height = min_height self.is_resizing", "a Colour object , not {outline}\") self._update_layer() return self def", "not in {\"center\", \"top\", \"bottom\"}: raise GraphicsError( \"\\n\\nThe center argument", "self.p1 = p1.copy() self.p2 = p2.copy() # These make sure", "object , not {outline}\") if outline_width is None: self.outline_width =", "height_scale = (p2[1] - p1[1]) / self.height # abs(p2[0] -", "or min_height < 1: raise GraphicsError(f\"\\n\\nGraphicsError: Minimum height and width", "elif center == \"right\": self.set_coords(self.p1.add_x(-(width - self.width)), self.p2) else: self.set_coords(self.p1.add_x(-(width", "None: self.outline_width = STYLES[\"default\"][\"width\"] elif isinstance(outline_width, int): # Checking if", "{\"top\": top, \"left\": left, \"bottom\": bottom, \"right\": right} self._set_resizable([top, bottom,", "resizing the object (set_height) needs to be one of \"", "self.is_resizing = {} self.bounds_thickness = 0 if fill is None:", "# Checking if p1's y value is greater than p2's.", "not {fill}\") self._update_layer() return self def set_outline(self, outline): if 
outline", "rectangle outline must be a Colour object , not {outline}\")", "is greater than p2's. If so, then swap the values", "left=False, bottom=False, right=False, min_width=40, min_height=40, bounds_width=10, top_bounds=None, bottom_bounds=None, left_bounds=None, right_bounds=None):", "self.p2[1]) // 2] self._update_layer() return self def set_width(self, width, center=\"center\"):", "and \\ (self.p1[1] < mouse_pos[1] < self.p2[1] or self.p1[1] >", "self.anchor = [(self.p1[0] + self.p2[0]) // 2, (self.p1[1] + self.p2[1])", "outline is None: self.outline = STYLES[\"default\"][\"outline\"] elif isinstance(outline, Colour): #", "mouse_pos[0] < self.p2[0] or self.p1[0] > mouse_pos[0] > self.p2[0]) and", "in subclasses\"\"\" pass def _move(self, dx, dy): self.p1[0] += dx", "(set_height) needs to be one of \" f'{[\"center\", \"top\", \"bottom\"]}')", "is False and left is False and right is False:", "in {\"center\", \"right\", \"left\"}: raise GraphicsError( \"\\n\\nThe center argument for", "raise an error raise GraphicsError( f\"\\n\\nGraphicsError: The rectangle outline width", "+ self.p2[0]) // 2, (self.p1[1] + self.p2[1]) // 2] self._update_layer()", "cursor=\"arrow\", layer=0, tag=None): self.p1 = p1 self.p2 = p2 #", "required because the p2 value is always greater than or", "raise GraphicsError(f\"\\n\\nGraphicsError: The rectangle outline must be a Colour object", "If so, then swap the values self.p1[1], self.p2[1] = self.p2[1],", "rectangle outline width must be an integer, not {outline_width}\") def", "Colour): # Checking if the option is a colour self.outline", "p1, ie the x & y value of p2 is", "width must be an integer, not {outline_width}\") def __repr__(self): return", "None: self.outline = STYLES[\"default\"][\"outline\"] elif isinstance(outline, Colour): # Checking if", "get_height(self): return self.height def get_fill(self): return self.fill def get_outline(self): return", "= None self.max_height = None self.resizing_bounds = {} self.is_resizing 
=", "raise GraphicsError(f\"\\n\\nGraphicsError: The rectangle outline width must be an integer,", "self.p2[1] - self.p1[1] self.min_width = None self.min_height = None self.max_width", "self.width)) elif center == \"right\": self.set_coords(self.p1.add_x(-(width - self.width)), self.p2) else:", "def get_left(self): return [self.p1[0], (self.p1[1] + self.p2[1]) / 2] def", "\"top\": self.set_coords(self.p1, self.p2.add_y(height - self.height)) elif center == \"bottom\": self.set_coords(self.p1.add_y(-(height", "class BBox(GraphicsObject): # Internal base class for objects represented by", "2] GraphicsObject.__init__(self, options=(), cursor=cursor, layer=layer, bounds=bounds, tag=tag) # abs(p2[0] -", "- p1[1] self.anchor = [(self.p1[0] + self.p2[0]) // 2, (self.p1[1]", "dy): self.p1[0] += dx self.p1[1] += dy self.p2[0] += dx", "width, height, horizontal_align=\"center\", vertical_align=\"center\"): self.set_width(width, horizontal_align) self.set_height(height, vertical_align) return self", "self.p2.copy() def get_top(self): return [(self.p2[0] + self.p1[0]) / 2, self.p1[1]]", "must be an integer, not {outline_width}\") def __repr__(self): return \"_BBox\"", "GraphicsObject.resizing_objects.remove(self) elif self not in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.add(self) self.bounds_thickness = bounds_width", "\"left\"}: raise GraphicsError( \"\\n\\nThe center argument for resizing the object", "None: return False else: if (self.p1[0] < mouse_pos[0] < self.p2[0]", "if (self.p1[0] < mouse_pos[0] < self.p2[0] or self.p1[0] > mouse_pos[0]", "get_top(self): return [(self.p2[0] + self.p1[0]) / 2, self.p1[1]] def get_bottom(self):", "self.p2[1] - self.p1[1] width_scale = (p2[0] - p1[0]) / self.width", "self.p2[0], self.p1[0] if self.p1[1] > self.p2[1]: # Checking if p1's", "be greater than \" f\"or equal to 1. 
Right now,", "return self def set_outline_width(self, outline_width): if outline_width is None: self.outline_width", "Checking if the option is a colour self.outline = outline", "center == \"left\": self.set_coords(self.p1, self.p2.add_x(width - self.width)) elif center ==", "elif self not in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.add(self) self.bounds_thickness = bounds_width return", "\"bottom\"}: raise GraphicsError( \"\\n\\nThe center argument for resizing the object", "outline must be a Colour object , not {outline}\") self._update_layer()", "raise GraphicsError(f\"\\n\\nGraphicsError: The Rectangle fill must be a Colour object", "if p1's x value is greater than p2's. If so,", "if fill is None: self.fill = STYLES[\"default\"][\"fill\"] elif isinstance(fill, Colour):", "height, horizontal_align=\"center\", vertical_align=\"center\"): self.set_width(width, horizontal_align) self.set_height(height, vertical_align) return self def", "is a degenerate case. resizing_objects = [] def __init__(self, p1,", "= p2[1] - p1[1] self.anchor = [(self.p1[0] + self.p2[0]) //", "else: return self.bounds.is_clicked(mouse_pos) def get_p1(self): return self.p1.copy() def get_p2(self): return", "then swap the values self.p1[0], self.p2[0] = self.p2[0], self.p1[0] if", "> self.p2[0]: # Checking if p1's x value is greater", "if p1's y value is greater than p2's. 
If so,", "self.min_width = min_width self.min_height = min_height self.is_resizing = {\"top\": top,", "= min_height self.is_resizing = {\"top\": top, \"left\": left, \"bottom\": bottom,", "resizing_objects = [] def __init__(self, p1, p2, bounds=None, fill=None, outline=None,", "self def set_resizable(self, top=False, left=False, bottom=False, right=False, min_width=40, min_height=40, bounds_width=10,", "/ 2] def get_width(self): return self.width def get_height(self): return self.height", "GraphicsObject from goopylib.styles import * class BBox(GraphicsObject): # Internal base", "self.height = self.p2[1] - self.p1[1] width_scale = (p2[0] - p1[0])", "+= dx self.p2[1] += dy self.anchor[0] += dx self.anchor[1] +=", "self.p1[0] += dx self.p1[1] += dy self.p2[0] += dx self.p2[1]", "== \"bottom\": self.set_coords(self.p1.add_y(-(height - self.height)), self.p2) else: self.set_coords(self.p1.add_y(-(height / 2", "= self.p2[0] - self.p1[0] self.height = self.p2[1] - self.p1[1] self.min_width", "self.p2[1]: # Checking if p1's y value is greater than", "y value is greater than p2's. 
If so, then swap", "isinstance(outline_width, int): # Checking if the option is an integer", "is False: if self in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.remove(self) elif self not", "outline_width else: # If not, raise an error raise GraphicsError(f\"\\n\\nGraphicsError:", "thickness=bounds_width) if top is False and bottom is False and", "outline_width): if outline_width is None: self.outline_width = STYLES[\"default\"][\"width\"] elif isinstance(outline_width,", "option is a colour self.outline = outline else: # If", "of \" f'{[\"center\", \"right\", \"left\"]}') if center == \"left\": self.set_coords(self.p1,", "return \"_BBox\" def _set_resizable(self, resizables, top_bounds=None, bottom_bounds=None, left_bounds=None, right_bounds=None, thickness=10):", "# Internal base class for objects represented by bounding box", "fill else: # If not, raise an error raise GraphicsError(f\"\\n\\nGraphicsError:", "None self.min_height = None self.max_width = None self.max_height = None", "is a colour self.fill = fill else: # If not,", "self.p2[0] += dx self.p2[1] += dy self.anchor[0] += dx self.anchor[1]", "self.set_coords(self.p1.add_x(-(width / 2 - self.width)), self.p2.add_x(width / 2 - self.width))", "return [self.p1[0], self.p2[1]] def get_bottom_right(self): return self.p2.copy() def get_top(self): return", "get_top_right(self): return self.p1.copy() def get_top_left(self): return [self.p2[0], self.p1[1]] def get_bottom_left(self):", "These make sure that the p2 is 'after' p1, ie", "self.bounds.is_clicked(mouse_pos) def get_p1(self): return self.p1.copy() def get_p2(self): return self.p2.copy() def", "p1.copy() self.p2 = p2.copy() # These make sure that the", "self.p2.add_y(height / 2 - self.height)) return self def set_fill(self, fill):", "self.p2[1], self.p1[1] # abs(p2[0] - p1[0]) is not required because", "# These make sure that the p2 is 'after' p1,", "\"left\": self.set_coords(self.p1, self.p2.add_x(width - self.width)) elif center == 
\"right\": self.set_coords(self.p1.add_x(-(width", "= None self.max_width = None self.max_height = None self.resizing_bounds =", "center == \"right\": self.set_coords(self.p1.add_x(-(width - self.width)), self.p2) else: self.set_coords(self.p1.add_x(-(width /", "p2 is 'after' p1, ie the x & y value", "isinstance(fill, Colour): # Checking if the option is a colour", "p1's y value is greater than p2's. If so, then", "min_height={min_height}\") self.min_width = min_width self.min_height = min_height self.is_resizing = {\"top\":", "2, (self.p1[1] + self.p2[1]) // 2] self._update_layer() return self def", "left is False and right is False: if self in", "not {fill}\") if outline is None: self.outline = STYLES[\"default\"][\"outline\"] elif", "and left is False and right is False: if self", "= p1 self.p2 = p2 # These make sure that", "layer=layer, bounds=bounds, tag=tag) # abs(p2[0] - p1[0]) is not required", "+= dx self.anchor[1] += dy def is_clicked(self, mouse_pos): if self.bounds", "(self.p1[1] < mouse_pos[1] < self.p2[1] or self.p1[1] > mouse_pos[1] >", "= p1.copy() self.p2 = p2.copy() # These make sure that", "= 0 if fill is None: self.fill = STYLES[\"default\"][\"fill\"] elif", "self.outline_width = STYLES[\"default\"][\"width\"] elif isinstance(outline_width, int): # Checking if the", "(set_outline_width) needs to be one of \" f'{[\"center\", \"right\", \"left\"]}')", "/ 2 - self.height)), self.p2.add_y(height / 2 - self.height)) return", "> self.p2[1]): return True else: return False else: return self.bounds.is_clicked(mouse_pos)", "set_resizable(self, top=False, left=False, bottom=False, right=False, min_width=40, min_height=40, bounds_width=10, top_bounds=None, bottom_bounds=None,", "1 or min_height < 1: raise GraphicsError(f\"\\n\\nGraphicsError: Minimum height and", "return self.p2.copy() def get_top_right(self): return self.p1.copy() def get_top_left(self): return [self.p2[0],", "of resizable object must be greater than \" f\"or equal", "outline must be a 
Colour object , not {outline}\") if", "of \" f'{[\"center\", \"top\", \"bottom\"]}') if center == \"top\": self.set_coords(self.p1,", "True else: return False else: return self.bounds.is_clicked(mouse_pos) def get_p1(self): return", "an error raise GraphicsError(f\"\\n\\nGraphicsError: The rectangle outline must be a", "self def set_coords(self, p1, p2): self.p1 = p1.copy() self.p2 =", "/ 2] def get_right(self): return [self.p2[0], (self.p1[1] + self.p2[1]) /", "self.p2[1] = self.p2[1], self.p1[1] # abs(p2[0] - p1[0]) is not", "elif isinstance(outline, Colour): # Checking if the option is a", "The rectangle outline must be a Colour object , not", "GraphicsError( \"\\n\\nThe center argument for resizing the object (set_outline_width) needs", "is None: self.outline_width = STYLES[\"default\"][\"width\"] elif isinstance(outline_width, int): # Checking", "the p2 is 'after' p1, ie the x & y", "\"right\", \"left\"}: raise GraphicsError( \"\\n\\nThe center argument for resizing the", "top=False, left=False, bottom=False, right=False, min_width=40, min_height=40, bounds_width=10, top_bounds=None, bottom_bounds=None, left_bounds=None,", "not, raise an error raise GraphicsError(f\"\\n\\nGraphicsError: The rectangle outline width", "Checking if p1's x value is greater than p2's. 
If", "self.anchor def set_dimensions(self, width, height, horizontal_align=\"center\", vertical_align=\"center\"): self.set_width(width, horizontal_align) self.set_height(height,", "self.p1[0] self.height = self.p2[1] - self.p1[1] self.min_width = None self.min_height", "object (set_height) needs to be one of \" f'{[\"center\", \"top\",", "return self.fill def get_outline(self): return self.outline def get_outline_width(self): return self.outline_width", "self.set_width(width, horizontal_align) self.set_height(height, vertical_align) return self def set_resizable(self, top=False, left=False,", "left_bounds=None, right_bounds=None, thickness=10): \"\"\"Override in subclasses\"\"\" pass def _move(self, dx,", "- self.width)) elif center == \"right\": self.set_coords(self.p1.add_x(-(width - self.width)), self.p2)", "def _move(self, dx, dy): self.p1[0] += dx self.p1[1] += dy", "\" f'{[\"center\", \"top\", \"bottom\"]}') if center == \"top\": self.set_coords(self.p1, self.p2.add_y(height", "bounds_width=10, top_bounds=None, bottom_bounds=None, left_bounds=None, right_bounds=None): if min_width < 1 or", "If not, raise an error raise GraphicsError(f\"\\n\\nGraphicsError: The rectangle outline", "vertical_align) return self def set_resizable(self, top=False, left=False, bottom=False, right=False, min_width=40,", "not, raise an error raise GraphicsError( f\"\\n\\nGraphicsError: The rectangle outline", "if self.p1[0] > self.p2[0]: # Checking if p1's x value", "- p1[0] self.height = p2[1] - p1[1] self.anchor = [(self.p1[0]", "{} self.bounds_thickness = 0 if fill is None: self.fill =", "is a colour self.outline = outline else: # If not,", "the option is a colour self.fill = fill else: #", "fill=None, outline=None, outline_width=None, cursor=\"arrow\", layer=0, tag=None): self.p1 = p1 self.p2", "is False and right is False: if self in GraphicsObject.resizing_objects:", "vertical_align=\"center\"): self.set_width(width, horizontal_align) self.set_height(height, vertical_align) 
return self def set_resizable(self, top=False,", "STYLES[\"default\"][\"outline\"] elif isinstance(outline, Colour): # Checking if the option is", "center=\"center\"): if center not in {\"center\", \"top\", \"bottom\"}: raise GraphicsError(", "if center not in {\"center\", \"top\", \"bottom\"}: raise GraphicsError( \"\\n\\nThe", "elif isinstance(outline_width, int): # Checking if the option is an", "corners) Line segment is a degenerate case. resizing_objects = []", "self.p2[1]): return True else: return False else: return self.bounds.is_clicked(mouse_pos) def", "self.p2.copy() def get_top_right(self): return self.p1.copy() def get_top_left(self): return [self.p2[0], self.p1[1]]", "[self.p2[0], self.p1[1]] def get_bottom_left(self): return [self.p1[0], self.p2[1]] def get_bottom_right(self): return", "self.p2.add_y(height - self.height)) elif center == \"bottom\": self.set_coords(self.p1.add_y(-(height - self.height)),", "right_bounds=None, thickness=10): \"\"\"Override in subclasses\"\"\" pass def _move(self, dx, dy):", "represented by bounding box # (opposite corners) Line segment is", "The Rectangle fill must be a Colour object , not", "def _set_resizable(self, resizables, top_bounds=None, bottom_bounds=None, left_bounds=None, right_bounds=None, thickness=10): \"\"\"Override in", "def get_bottom_left(self): return [self.p1[0], self.p2[1]] def get_bottom_right(self): return self.p2.copy() def", "if top is False and bottom is False and left", "self.p1[0] self.height = self.p2[1] - self.p1[1] width_scale = (p2[0] -", "rectangle outline width must be an integer, not {outline_width}\") self._update_layer()", "is None: if mouse_pos is None: return False else: if", "self.height = p2[1] - p1[1] self.anchor = [(self.p1[0] + self.p2[0])", "None self.resizing_bounds = {} self.is_resizing = {} self.bounds_thickness = 0", "& min_height={min_height}\") self.min_width = min_width self.min_height = min_height self.is_resizing =", "self.set_coords(self.p1.add_y(-(height / 2 - 
self.height)), self.p2.add_y(height / 2 - self.height))", "to the p1 value self.width = p2[0] - p1[0] self.height", "\"top\", \"bottom\"]}') if center == \"top\": self.set_coords(self.p1, self.p2.add_y(height - self.height))", "greater than or equal to the p1 value self.width =", "False and right is False: if self in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.remove(self)", "def __init__(self, p1, p2, bounds=None, fill=None, outline=None, outline_width=None, cursor=\"arrow\", layer=0,", "be one of \" f'{[\"center\", \"top\", \"bottom\"]}') if center ==", "self.p1 = p1 self.p2 = p2 # These make sure", "values self.p1[1], self.p2[1] = self.p2[1], self.p1[1] # abs(p2[0] - p1[0])", "return self def set_resizable(self, top=False, left=False, bottom=False, right=False, min_width=40, min_height=40,", "def set_height(self, height, center=\"center\"): if center not in {\"center\", \"top\",", "layer=0, tag=None): self.p1 = p1 self.p2 = p2 # These", "p1 self.p2 = p2 # These make sure that the", "GraphicsError( \"\\n\\nThe center argument for resizing the object (set_height) needs", "p1's x value is greater than p2's. If so, then", "return False else: return self.bounds.is_clicked(mouse_pos) def get_p1(self): return self.p1.copy() def", "width, center=\"center\"): if center not in {\"center\", \"right\", \"left\"}: raise", "def get_right(self): return [self.p2[0], (self.p1[1] + self.p2[1]) / 2] def", "dy self.anchor[0] += dx self.anchor[1] += dy def is_clicked(self, mouse_pos):", "* class BBox(GraphicsObject): # Internal base class for objects represented", "an error raise GraphicsError(f\"\\n\\nGraphicsError: The rectangle outline width must be", "\"top\", \"bottom\"}: raise GraphicsError( \"\\n\\nThe center argument for resizing the", "p2's. 
If so, then swap the values self.p1[1], self.p2[1] =", "GraphicsError(f\"\\n\\nGraphicsError: The rectangle outline must be a Colour object ,", "/ 2 - self.height)) return self def set_fill(self, fill): if", "The rectangle outline width must be an integer, not {outline_width}\")", "bottom is False and left is False and right is", "resizables, top_bounds=None, bottom_bounds=None, left_bounds=None, right_bounds=None, thickness=10): \"\"\"Override in subclasses\"\"\" pass", "needs to be one of \" f'{[\"center\", \"top\", \"bottom\"]}') if", "self.p2 = p2 # These make sure that the p2", "self.p2[1]] def get_bottom_right(self): return self.p2.copy() def get_top(self): return [(self.p2[0] +", "set_outline(self, outline): if outline is None: self.outline = STYLES[\"default\"][\"outline\"] elif", "+ self.p2[1]) // 2] GraphicsObject.__init__(self, options=(), cursor=cursor, layer=layer, bounds=bounds, tag=tag)", "// 2, (self.p1[1] + self.p2[1]) // 2] self._update_layer() return self", "1. Right now, min_width={min_width} & min_height={min_height}\") self.min_width = min_width self.min_height", "object , not {fill}\") if outline is None: self.outline =", "self.set_coords(self.p1, self.p2.add_x(width - self.width)) elif center == \"right\": self.set_coords(self.p1.add_x(-(width -", "[self.p1[0], self.p2[1]] def get_bottom_right(self): return self.p2.copy() def get_top(self): return [(self.p2[0]", "an integer, not {outline_width}\") def __repr__(self): return \"_BBox\" def _set_resizable(self,", "= {\"top\": top, \"left\": left, \"bottom\": bottom, \"right\": right} self._set_resizable([top,", "self.p2[0]) // 2, (self.p1[1] + self.p2[1]) // 2] self._update_layer() return", "Line segment is a degenerate case. 
resizing_objects = [] def", "2] self._update_layer() return self def set_width(self, width, center=\"center\"): if center", "self not in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.add(self) self.bounds_thickness = bounds_width return self", "= STYLES[\"default\"][\"fill\"] elif isinstance(fill, Colour): # Checking if the option", "get_fill(self): return self.fill def get_outline(self): return self.outline def get_outline_width(self): return", "values self.p1[1], self.p2[1] = self.p2[1], self.p1[1] self.anchor = [(self.p1[0] +", "width_scale = (p2[0] - p1[0]) / self.width height_scale = (p2[1]", "self.height)), self.p2.add_y(height / 2 - self.height)) return self def set_fill(self,", "get_right(self): return [self.p2[0], (self.p1[1] + self.p2[1]) / 2] def get_width(self):", "abs(p2[0] - p1[0]) is not required because the p2 value", "\"right\": self.set_coords(self.p1.add_x(-(width - self.width)), self.p2) else: self.set_coords(self.p1.add_x(-(width / 2 -", "p2): self.p1 = p1.copy() self.p2 = p2.copy() # These make", "set_width(self, width, center=\"center\"): if center not in {\"center\", \"right\", \"left\"}:", "self.p2[0]) // 2, (self.p1[1] + self.p2[1]) // 2] GraphicsObject.__init__(self, options=(),", "+ self.p2[1]) // 2] self._update_layer() return self def set_width(self, width,", "else: self.set_coords(self.p1.add_x(-(width / 2 - self.width)), self.p2.add_x(width / 2 -", "+= dx self.p1[1] += dy self.p2[0] += dx self.p2[1] +=", "self.anchor[1] += dy def is_clicked(self, mouse_pos): if self.bounds is None:", "self.p1[1] > mouse_pos[1] > self.p2[1]): return True else: return False", "value self.width = self.p2[0] - self.p1[0] self.height = self.p2[1] -", "top_bounds=None, bottom_bounds=None, left_bounds=None, right_bounds=None): if min_width < 1 or min_height", "(self.p1[1] + self.p2[1]) // 2] GraphicsObject.__init__(self, options=(), cursor=cursor, layer=layer, bounds=bounds,", "is 'after' p1, ie the x & y value of", "mouse_pos): if 
self.bounds is None: if mouse_pos is None: return", "or self.p1[1] > mouse_pos[1] > self.p2[1]): return True else: return", "+ self.p2[1]) / 2] def get_right(self): return [self.p2[0], (self.p1[1] +", "not {outline}\") self._update_layer() return self def set_outline_width(self, outline_width): if outline_width", "def set_outline_width(self, outline_width): if outline_width is None: self.outline_width = STYLES[\"default\"][\"width\"]", "integer self.outline_width = outline_width else: # If not, raise an", "Checking if the option is a colour self.fill = fill", "[(self.p1[0] + self.p2[0]) // 2, (self.p1[1] + self.p2[1]) // 2]", "the p1 value self.width = self.p2[0] - self.p1[0] self.height =", "self.resizing_bounds = {} self.is_resizing = {} self.bounds_thickness = 0 if", "horizontal_align) self.set_height(height, vertical_align) return self def set_resizable(self, top=False, left=False, bottom=False,", "= [] def __init__(self, p1, p2, bounds=None, fill=None, outline=None, outline_width=None,", "the x & y value of p2 is greater than", "self.p1[0]) / 2, self.p2[1]] def get_left(self): return [self.p1[0], (self.p1[1] +", "needs to be one of \" f'{[\"center\", \"right\", \"left\"]}') if", "def get_height(self): return self.height def get_fill(self): return self.fill def get_outline(self):", "= min_width self.min_height = min_height self.is_resizing = {\"top\": top, \"left\":", "than \" f\"or equal to 1. 
Right now, min_width={min_width} &", "if center == \"left\": self.set_coords(self.p1, self.p2.add_x(width - self.width)) elif center", "return self def set_height(self, height, center=\"center\"): if center not in", "def set_width(self, width, center=\"center\"): if center not in {\"center\", \"right\",", "None self.max_height = None self.resizing_bounds = {} self.is_resizing = {}", "left, \"bottom\": bottom, \"right\": right} self._set_resizable([top, bottom, left, right], top_bounds=top_bounds,", "get_width(self): return self.width def get_height(self): return self.height def get_fill(self): return", "self.p2[0] - self.p1[0] self.height = self.p2[1] - self.p1[1] self.min_width =", "def get_outline(self): return self.outline def get_outline_width(self): return self.outline_width def get_anchor(self):", "greater than p2's. If so, then swap the values self.p1[1],", "goopylib.objects.GraphicsObject import GraphicsObject from goopylib.styles import * class BBox(GraphicsObject): #", "self.p1[1] # abs(p2[0] - p1[0]) is not required because the", "self.p2[0]) and \\ (self.p1[1] < mouse_pos[1] < self.p2[1] or self.p1[1]", "self._update_layer() return self def set_outline_width(self, outline_width): if outline_width is None:", "return [(self.p2[0] + self.p1[0]) / 2, self.p1[1]] def get_bottom(self): return", "mouse_pos[0] > self.p2[0]) and \\ (self.p1[1] < mouse_pos[1] < self.p2[1]", "self.p1[1]] def get_bottom(self): return [(self.p2[0] + self.p1[0]) / 2, self.p2[1]]", "bottom, \"right\": right} self._set_resizable([top, bottom, left, right], top_bounds=top_bounds, bottom_bounds=bottom_bounds, left_bounds=left_bounds,", "equal to the p1 value self.width = p2[0] - p1[0]", "colour self.outline = outline else: # If not, raise an", "self.min_height = min_height self.is_resizing = {\"top\": top, \"left\": left, \"bottom\":", "min_height < 1: raise GraphicsError(f\"\\n\\nGraphicsError: Minimum height and width of", "top, \"left\": left, \"bottom\": bottom, \"right\": right} 
self._set_resizable([top, bottom, left,", "0 if fill is None: self.fill = STYLES[\"default\"][\"fill\"] elif isinstance(fill,", "\"left\"]}') if center == \"left\": self.set_coords(self.p1, self.p2.add_x(width - self.width)) elif", "center=\"center\"): if center not in {\"center\", \"right\", \"left\"}: raise GraphicsError(", "< mouse_pos[1] < self.p2[1] or self.p1[1] > mouse_pos[1] > self.p2[1]):", "values self.p1[0], self.p2[0] = self.p2[0], self.p1[0] if self.p1[1] > self.p2[1]:", "outline width must be an integer, not {outline_width}\") def __repr__(self):", "def get_bottom(self): return [(self.p2[0] + self.p1[0]) / 2, self.p2[1]] def", "2, self.p1[1]] def get_bottom(self): return [(self.p2[0] + self.p1[0]) / 2,", "center not in {\"center\", \"right\", \"left\"}: raise GraphicsError( \"\\n\\nThe center", "self def set_fill(self, fill): if fill is None: self.fill =", "bottom=False, right=False, min_width=40, min_height=40, bounds_width=10, top_bounds=None, bottom_bounds=None, left_bounds=None, right_bounds=None): if", "= self.p2[0], self.p1[0] if self.p1[1] > self.p2[1]: # Checking if", "self.bounds_thickness = bounds_width return self def set_coords(self, p1, p2): self.p1", "self.height)) return self def set_fill(self, fill): if fill is None:", "segment is a degenerate case. 
resizing_objects = [] def __init__(self,", "and width of resizable object must be greater than \"", "self.p2[1]) // 2] GraphicsObject.__init__(self, options=(), cursor=cursor, layer=layer, bounds=bounds, tag=tag) #", "+= dy self.anchor[0] += dx self.anchor[1] += dy def is_clicked(self,", "def get_fill(self): return self.fill def get_outline(self): return self.outline def get_outline_width(self):", "integer, not {outline_width}\") def __repr__(self): return \"_BBox\" def _set_resizable(self, resizables,", "self.min_width = None self.min_height = None self.max_width = None self.max_height", "Internal base class for objects represented by bounding box #", "< mouse_pos[0] < self.p2[0] or self.p1[0] > mouse_pos[0] > self.p2[0])", "self._update_layer() return self def set_outline(self, outline): if outline is None:", "self.p2[1], self.p1[1] self.anchor = [(self.p1[0] + self.p2[0]) // 2, (self.p1[1]", "if outline_width is None: self.outline_width = STYLES[\"default\"][\"width\"] elif isinstance(outline_width, int):", "not in {\"center\", \"right\", \"left\"}: raise GraphicsError( \"\\n\\nThe center argument", "self.p2.add_x(width - self.width)) elif center == \"right\": self.set_coords(self.p1.add_x(-(width - self.width)),", "> self.p2[0]) and \\ (self.p1[1] < mouse_pos[1] < self.p2[1] or", "get_top_left(self): return [self.p2[0], self.p1[1]] def get_bottom_left(self): return [self.p1[0], self.p2[1]] def", "a Colour object , not {outline}\") if outline_width is None:", "min_width self.min_height = min_height self.is_resizing = {\"top\": top, \"left\": left,", "/ 2, self.p2[1]] def get_left(self): return [self.p1[0], (self.p1[1] + self.p2[1])", "the object (set_outline_width) needs to be one of \" f'{[\"center\",", "= {} self.bounds_thickness = 0 if fill is None: self.fill", "def get_p1(self): return self.p1.copy() def get_p2(self): return self.p2.copy() def get_top_right(self):", "= self.p2[1] - self.p1[1] width_scale = (p2[0] - p1[0]) /", "always greater than or equal to 
the p1 value self.width", "top_bounds=None, bottom_bounds=None, left_bounds=None, right_bounds=None, thickness=10): \"\"\"Override in subclasses\"\"\" pass def", "p1, p2, bounds=None, fill=None, outline=None, outline_width=None, cursor=\"arrow\", layer=0, tag=None): self.p1", "{outline}\") if outline_width is None: self.outline_width = STYLES[\"default\"][\"width\"] elif isinstance(outline_width,", "\"\"\"Override in subclasses\"\"\" pass def _move(self, dx, dy): self.p1[0] +=", "< self.p2[1] or self.p1[1] > mouse_pos[1] > self.p2[1]): return True", "+ self.p1[0]) / 2, self.p1[1]] def get_bottom(self): return [(self.p2[0] +", "self.p1[1] self.anchor = [(self.p1[0] + self.p2[0]) // 2, (self.p1[1] +", "def get_anchor(self): return self.anchor def set_dimensions(self, width, height, horizontal_align=\"center\", vertical_align=\"center\"):", "resizing the object (set_outline_width) needs to be one of \"", "# Checking if the option is a colour self.outline =", "return [self.p2[0], self.p1[1]] def get_bottom_left(self): return [self.p1[0], self.p2[1]] def get_bottom_right(self):", "p2 is greater than that of p1 if self.p1[0] >", "set_outline_width(self, outline_width): if outline_width is None: self.outline_width = STYLES[\"default\"][\"width\"] elif", "to be one of \" f'{[\"center\", \"right\", \"left\"]}') if center", "return self.height def get_fill(self): return self.fill def get_outline(self): return self.outline", "swap the values self.p1[1], self.p2[1] = self.p2[1], self.p1[1] # abs(p2[0]", "raise an error raise GraphicsError(f\"\\n\\nGraphicsError: The rectangle outline must be", "raise an error raise GraphicsError(f\"\\n\\nGraphicsError: The rectangle outline width must", "self.p1[0] if self.p1[1] > self.p2[1]: # Checking if p1's y", "the values self.p1[0], self.p2[0] = self.p2[0], self.p1[0] if self.p1[1] >", "in GraphicsObject.resizing_objects: GraphicsObject.resizing_objects.remove(self) elif self not in GraphicsObject.resizing_objects: 
GraphicsObject.resizing_objects.add(self) self.bounds_thickness", "self.p2[1]) / 2] def get_width(self): return self.width def get_height(self): return", "else: self.set_coords(self.p1.add_y(-(height / 2 - self.height)), self.p2.add_y(height / 2 -", "self.width height_scale = (p2[1] - p1[1]) / self.height # abs(p2[0]", "2, (self.p1[1] + self.p2[1]) // 2] GraphicsObject.__init__(self, options=(), cursor=cursor, layer=layer,", "than p2's. If so, then swap the values self.p1[1], self.p2[1]", "bounding box # (opposite corners) Line segment is a degenerate", "if the option is a colour self.outline = outline else:", "- p1[0]) / self.width height_scale = (p2[1] - p1[1]) /", "= outline_width else: # If not, raise an error raise", "value self.width = p2[0] - p1[0] self.height = p2[1] -", "outline else: # If not, raise an error raise GraphicsError(f\"\\n\\nGraphicsError:", "center == \"bottom\": self.set_coords(self.p1.add_y(-(height - self.height)), self.p2) else: self.set_coords(self.p1.add_y(-(height /", "else: # If not, raise an error raise GraphicsError(f\"\\n\\nGraphicsError: The", "if the option is an integer self.outline_width = outline_width else:", "p1[1]) / self.height # abs(p2[0] - p1[0]) is not required", "return self.width def get_height(self): return self.height def get_fill(self): return self.fill" ]
[ "False while queue: v = queue.pop(0) print(v, end=\" \") for", "dfs(i) def bfs(v): queue = [v] visit[v] = False while", "i in range(1, n+1): if visit[i] and graph[v][i]==1: queue.append(i) visit[i]", "= 1 graph[C][R] = 1 def dfs(v): visit[v] = True", "end=\" \") for i in range(1, n+1): if not visit[i]", "print(v, end=\" \") for i in range(1, n+1): if not", "for i in range(1, n+1): if not visit[i] and graph[v][i]==1:", "* (n+1) for _ in range(m): R, C = map(int,", "= False while queue: v = queue.pop(0) print(v, end=\" \")", "1 graph[C][R] = 1 def dfs(v): visit[v] = True print(v,", "= map(int, input().split()) graph[R][C] = 1 graph[C][R] = 1 def", "in range(n+1)] visit = [False] * (n+1) for _ in", "not visit[i] and graph[v][i]==1: dfs(i) def bfs(v): queue = [v]", "_ in range(m): R, C = map(int, input().split()) graph[R][C] =", "\") for i in range(1, n+1): if visit[i] and graph[v][i]==1:", "= 1 def dfs(v): visit[v] = True print(v, end=\" \")", "if not visit[i] and graph[v][i]==1: dfs(i) def bfs(v): queue =", "in range(1, n+1): if visit[i] and graph[v][i]==1: queue.append(i) visit[i] =", "= True print(v, end=\" \") for i in range(1, n+1):", "# https://www.acmicpc.net/problem/1260 n, m, v = map(int, input().split()) graph =", "* (n+1) for _ in range(n+1)] visit = [False] *", "= [v] visit[v] = False while queue: v = queue.pop(0)", "for _ in range(n+1)] visit = [False] * (n+1) for", "= [[0] * (n+1) for _ in range(n+1)] visit =", "while queue: v = queue.pop(0) print(v, end=\" \") for i", "queue.pop(0) print(v, end=\" \") for i in range(1, n+1): if", "visit[i] and graph[v][i]==1: dfs(i) def bfs(v): queue = [v] visit[v]", "print(v, end=\" \") for i in range(1, n+1): if visit[i]", "in range(1, n+1): if not visit[i] and graph[v][i]==1: dfs(i) def", "n, m, v = map(int, input().split()) graph = [[0] *", "[[0] * (n+1) for _ in range(n+1)] visit = [False]", "and graph[v][i]==1: dfs(i) def bfs(v): queue = [v] visit[v] =", "bfs(v): queue = [v] visit[v] = False while queue: v", 
"True print(v, end=\" \") for i in range(1, n+1): if", "m, v = map(int, input().split()) graph = [[0] * (n+1)", "map(int, input().split()) graph[R][C] = 1 graph[C][R] = 1 def dfs(v):", "visit[v] = True print(v, end=\" \") for i in range(1,", "map(int, input().split()) graph = [[0] * (n+1) for _ in", "graph[v][i]==1: dfs(i) def bfs(v): queue = [v] visit[v] = False", "queue = [v] visit[v] = False while queue: v =", "for _ in range(m): R, C = map(int, input().split()) graph[R][C]", "input().split()) graph = [[0] * (n+1) for _ in range(n+1)]", "R, C = map(int, input().split()) graph[R][C] = 1 graph[C][R] =", "range(n+1)] visit = [False] * (n+1) for _ in range(m):", "in range(m): R, C = map(int, input().split()) graph[R][C] = 1", "1 def dfs(v): visit[v] = True print(v, end=\" \") for", "graph[C][R] = 1 def dfs(v): visit[v] = True print(v, end=\"", "n+1): if visit[i] and graph[v][i]==1: queue.append(i) visit[i] = False dfs(v)", "def bfs(v): queue = [v] visit[v] = False while queue:", "visit[i] and graph[v][i]==1: queue.append(i) visit[i] = False dfs(v) print() bfs(v)", "visit[v] = False while queue: v = queue.pop(0) print(v, end=\"", "\") for i in range(1, n+1): if not visit[i] and", "i in range(1, n+1): if not visit[i] and graph[v][i]==1: dfs(i)", "range(1, n+1): if not visit[i] and graph[v][i]==1: dfs(i) def bfs(v):", "[v] visit[v] = False while queue: v = queue.pop(0) print(v,", "dfs(v): visit[v] = True print(v, end=\" \") for i in", "range(m): R, C = map(int, input().split()) graph[R][C] = 1 graph[C][R]", "= queue.pop(0) print(v, end=\" \") for i in range(1, n+1):", "if visit[i] and graph[v][i]==1: queue.append(i) visit[i] = False dfs(v) print()", "v = map(int, input().split()) graph = [[0] * (n+1) for", "queue: v = queue.pop(0) print(v, end=\" \") for i in", "def dfs(v): visit[v] = True print(v, end=\" \") for i", "= [False] * (n+1) for _ in range(m): R, C", "_ in range(n+1)] visit = [False] * (n+1) for _", "[False] * (n+1) for _ in range(m): R, C =", "n+1): if not 
visit[i] and graph[v][i]==1: dfs(i) def bfs(v): queue", "input().split()) graph[R][C] = 1 graph[C][R] = 1 def dfs(v): visit[v]", "for i in range(1, n+1): if visit[i] and graph[v][i]==1: queue.append(i)", "range(1, n+1): if visit[i] and graph[v][i]==1: queue.append(i) visit[i] = False", "(n+1) for _ in range(n+1)] visit = [False] * (n+1)", "(n+1) for _ in range(m): R, C = map(int, input().split())", "C = map(int, input().split()) graph[R][C] = 1 graph[C][R] = 1", "v = queue.pop(0) print(v, end=\" \") for i in range(1,", "end=\" \") for i in range(1, n+1): if visit[i] and", "graph[R][C] = 1 graph[C][R] = 1 def dfs(v): visit[v] =", "= map(int, input().split()) graph = [[0] * (n+1) for _", "graph = [[0] * (n+1) for _ in range(n+1)] visit", "https://www.acmicpc.net/problem/1260 n, m, v = map(int, input().split()) graph = [[0]", "visit = [False] * (n+1) for _ in range(m): R," ]
[ "0 or s[i-1] != c) and (i+1 == len(s) or", "def modifyString(self, s: str) -> str: s = list(s) for", "in range(len(s)): if s[i] == \"?\": for c in \"abc\":", "len(s) or s[i+1] != c): s[i] = c break return", "(i+1 == len(s) or s[i+1] != c): s[i] = c", "== len(s) or s[i+1] != c): s[i] = c break", "== \"?\": for c in \"abc\": if (i == 0", "for c in \"abc\": if (i == 0 or s[i-1]", "s[i] == \"?\": for c in \"abc\": if (i ==", "c) and (i+1 == len(s) or s[i+1] != c): s[i]", "== 0 or s[i-1] != c) and (i+1 == len(s)", "modifyString(self, s: str) -> str: s = list(s) for i", "for i in range(len(s)): if s[i] == \"?\": for c", "(i == 0 or s[i-1] != c) and (i+1 ==", "s: str) -> str: s = list(s) for i in", "in \"abc\": if (i == 0 or s[i-1] != c)", "or s[i-1] != c) and (i+1 == len(s) or s[i+1]", "s = list(s) for i in range(len(s)): if s[i] ==", "\"abc\": if (i == 0 or s[i-1] != c) and", "or s[i+1] != c): s[i] = c break return \"\".join(s)", "s[i-1] != c) and (i+1 == len(s) or s[i+1] !=", "!= c) and (i+1 == len(s) or s[i+1] != c):", "= list(s) for i in range(len(s)): if s[i] == \"?\":", "Solution: def modifyString(self, s: str) -> str: s = list(s)", "list(s) for i in range(len(s)): if s[i] == \"?\": for", "if s[i] == \"?\": for c in \"abc\": if (i", "if (i == 0 or s[i-1] != c) and (i+1", "class Solution: def modifyString(self, s: str) -> str: s =", "\"?\": for c in \"abc\": if (i == 0 or", "str) -> str: s = list(s) for i in range(len(s)):", "range(len(s)): if s[i] == \"?\": for c in \"abc\": if", "and (i+1 == len(s) or s[i+1] != c): s[i] =", "i in range(len(s)): if s[i] == \"?\": for c in", "str: s = list(s) for i in range(len(s)): if s[i]", "-> str: s = list(s) for i in range(len(s)): if", "c in \"abc\": if (i == 0 or s[i-1] !=" ]
[ "get_precipitation_fields(num_prev_files=0): \"\"\"Get a precipitation field from the archive to be", "<reponame>Fangyh09/pysteps \"\"\" Testing helper functions ======================= Collection of helper functions", "data_source[\"fn_pattern\"] fn_ext = data_source[\"fn_ext\"] importer_name = data_source[\"importer\"] importer_kwargs = data_source[\"importer_kwargs\"]", "threshold=0.1, zerovalue=-15.0) return reference_field def smart_assert(actual_value, expected, tolerance=None): \"\"\" Assert", "testing suite. \"\"\" from datetime import datetime import numpy as", "field from the archive to be used as reference.\"\"\" #", "to be used as reference.\"\"\" # Selected case date =", "metadata = stp.utils.to_rainrate(reference_field, metadata) # Mask invalid values reference_field =", "if tolerance is None: assert actual_value == expected else: #", "= rcparams.data_sources[\"mch\"] root_path = data_source[\"root_path\"] path_fmt = data_source[\"path_fmt\"] fn_pattern =", "path_fmt = data_source[\"path_fmt\"] fn_pattern = data_source[\"fn_pattern\"] fn_ext = data_source[\"fn_ext\"] importer_name", "reference_field def smart_assert(actual_value, expected, tolerance=None): \"\"\" Assert by equality for", "by approximation otherwise. If the precision keyword is None, assert", "None, assert that two numeric values (or two sets of", "datetime.strptime(\"201505151630\", \"%Y%m%d%H%M\") data_source = rcparams.data_sources[\"mch\"] root_path = data_source[\"root_path\"] path_fmt =", "precision keyword is None, assert by equality. 
When the precision", "sets of numbers) are equal to each other within the", "data_source[\"root_path\"] path_fmt = data_source[\"path_fmt\"] fn_pattern = data_source[\"fn_pattern\"] fn_ext = data_source[\"fn_ext\"]", "\"\"\" Assert by equality for non-numeric values, or by approximation", "from datetime import datetime import numpy as np import pytest", "= stp.utils.dB_transform(reference_field, metadata, threshold=0.1, zerovalue=-15.0) return reference_field def smart_assert(actual_value, expected,", "pysteps import io, rcparams def get_precipitation_fields(num_prev_files=0): \"\"\"Get a precipitation field", "# Log-transform the data [dBR] reference_field, metadata = stp.utils.dB_transform(reference_field, metadata,", "up to a certain precision assert actual_value == pytest.approx(expected, 1e-6)", "the testing suite. \"\"\" from datetime import datetime import numpy", "# Remove time dimension # Convert to mm/h reference_field, metadata", "Collection of helper functions for the testing suite. \"\"\" from", "**importer_kwargs) del quality # Not used if num_prev_files == 0:", "of numbers) are equal to each other within the tolerance.", "reference_field, metadata = stp.utils.dB_transform(reference_field, metadata, threshold=0.1, zerovalue=-15.0) return reference_field def", "= data_source[\"fn_pattern\"] fn_ext = data_source[\"fn_ext\"] importer_name = data_source[\"importer\"] importer_kwargs =", "of helper functions for the testing suite. \"\"\" from datetime", "tolerance=None): \"\"\" Assert by equality for non-numeric values, or by", "the tolerance. \"\"\" if tolerance is None: assert actual_value ==", "numbers) are equal to each other within the tolerance. 
\"\"\"", "reference_field, quality, metadata = io.read_timeseries(fns, importer, **importer_kwargs) del quality #", "to mm/h reference_field, metadata = stp.utils.to_rainrate(reference_field, metadata) # Mask invalid", "stp.utils.to_rainrate(reference_field, metadata) # Mask invalid values reference_field = np.ma.masked_invalid(reference_field) #", "None, assert by equality. When the precision is not None,", "== expected else: # Compare numbers up to a certain", "\"\"\"Get a precipitation field from the archive to be used", "= stp.utils.to_rainrate(reference_field, metadata) # Mask invalid values reference_field = np.ma.masked_invalid(reference_field)", "case date = datetime.strptime(\"201505151630\", \"%Y%m%d%H%M\") data_source = rcparams.data_sources[\"mch\"] root_path =", "quality, metadata = io.read_timeseries(fns, importer, **importer_kwargs) del quality # Not", "import numpy as np import pytest import pysteps as stp", "are equal to each other within the tolerance. \"\"\" if", "input files from the archive fns = io.archive.find_by_date(date, root_path, path_fmt,", "suite. \"\"\" from datetime import datetime import numpy as np", "Selected case date = datetime.strptime(\"201505151630\", \"%Y%m%d%H%M\") data_source = rcparams.data_sources[\"mch\"] root_path", "quality # Not used if num_prev_files == 0: reference_field =", "importer, **importer_kwargs) del quality # Not used if num_prev_files ==", "as np import pytest import pysteps as stp from pysteps", "def get_precipitation_fields(num_prev_files=0): \"\"\"Get a precipitation field from the archive to", "# Find the input files from the archive fns =", "Find the input files from the archive fns = io.archive.find_by_date(date,", "or by approximation otherwise. 
If the precision keyword is None,", "# Read the radar composites importer = io.get_method(importer_name, \"importer\") reference_field,", "[dBR] reference_field, metadata = stp.utils.dB_transform(reference_field, metadata, threshold=0.1, zerovalue=-15.0) return reference_field", "Read the radar composites importer = io.get_method(importer_name, \"importer\") reference_field, quality,", "used if num_prev_files == 0: reference_field = np.squeeze(reference_field) # Remove", "rcparams.data_sources[\"mch\"] root_path = data_source[\"root_path\"] path_fmt = data_source[\"path_fmt\"] fn_pattern = data_source[\"fn_pattern\"]", "as reference.\"\"\" # Selected case date = datetime.strptime(\"201505151630\", \"%Y%m%d%H%M\") data_source", "data_source[\"importer\"] importer_kwargs = data_source[\"importer_kwargs\"] # Find the input files from", "io.read_timeseries(fns, importer, **importer_kwargs) del quality # Not used if num_prev_files", "numeric values (or two sets of numbers) are equal to", "data [dBR] reference_field, metadata = stp.utils.dB_transform(reference_field, metadata, threshold=0.1, zerovalue=-15.0) return", "expected, tolerance=None): \"\"\" Assert by equality for non-numeric values, or", "equality for non-numeric values, or by approximation otherwise. If the", "otherwise. If the precision keyword is None, assert by equality.", "======================= Collection of helper functions for the testing suite. \"\"\"", "importer_kwargs = data_source[\"importer_kwargs\"] # Find the input files from the", "def smart_assert(actual_value, expected, tolerance=None): \"\"\" Assert by equality for non-numeric", "values, or by approximation otherwise. If the precision keyword is", "fns = io.archive.find_by_date(date, root_path, path_fmt, fn_pattern, fn_ext, timestep=5, num_prev_files=num_prev_files) #", "tolerance. 
\"\"\" if tolerance is None: assert actual_value == expected", "datetime import datetime import numpy as np import pytest import", "rcparams def get_precipitation_fields(num_prev_files=0): \"\"\"Get a precipitation field from the archive", "timestep=5, num_prev_files=num_prev_files) # Read the radar composites importer = io.get_method(importer_name,", "invalid values reference_field = np.ma.masked_invalid(reference_field) # Log-transform the data [dBR]", "archive to be used as reference.\"\"\" # Selected case date", "Mask invalid values reference_field = np.ma.masked_invalid(reference_field) # Log-transform the data", "num_prev_files=num_prev_files) # Read the radar composites importer = io.get_method(importer_name, \"importer\")", "equality. When the precision is not None, assert that two", "non-numeric values, or by approximation otherwise. If the precision keyword", "metadata, threshold=0.1, zerovalue=-15.0) return reference_field def smart_assert(actual_value, expected, tolerance=None): \"\"\"", "stp from pysteps import io, rcparams def get_precipitation_fields(num_prev_files=0): \"\"\"Get a", "approximation otherwise. If the precision keyword is None, assert by", "= datetime.strptime(\"201505151630\", \"%Y%m%d%H%M\") data_source = rcparams.data_sources[\"mch\"] root_path = data_source[\"root_path\"] path_fmt", "metadata = io.read_timeseries(fns, importer, **importer_kwargs) del quality # Not used", "helper functions ======================= Collection of helper functions for the testing", "data_source = rcparams.data_sources[\"mch\"] root_path = data_source[\"root_path\"] path_fmt = data_source[\"path_fmt\"] fn_pattern", "= data_source[\"fn_ext\"] importer_name = data_source[\"importer\"] importer_kwargs = data_source[\"importer_kwargs\"] # Find", "within the tolerance. 
\"\"\" if tolerance is None: assert actual_value", "return reference_field def smart_assert(actual_value, expected, tolerance=None): \"\"\" Assert by equality", "archive fns = io.archive.find_by_date(date, root_path, path_fmt, fn_pattern, fn_ext, timestep=5, num_prev_files=num_prev_files)", "import pysteps as stp from pysteps import io, rcparams def", "# Selected case date = datetime.strptime(\"201505151630\", \"%Y%m%d%H%M\") data_source = rcparams.data_sources[\"mch\"]", "equal to each other within the tolerance. \"\"\" if tolerance", "for non-numeric values, or by approximation otherwise. If the precision", "pysteps as stp from pysteps import io, rcparams def get_precipitation_fields(num_prev_files=0):", "the precision is not None, assert that two numeric values", "date = datetime.strptime(\"201505151630\", \"%Y%m%d%H%M\") data_source = rcparams.data_sources[\"mch\"] root_path = data_source[\"root_path\"]", "= io.get_method(importer_name, \"importer\") reference_field, quality, metadata = io.read_timeseries(fns, importer, **importer_kwargs)", "If the precision keyword is None, assert by equality. When", "by equality. 
When the precision is not None, assert that", "reference_field, metadata = stp.utils.to_rainrate(reference_field, metadata) # Mask invalid values reference_field", "precipitation field from the archive to be used as reference.\"\"\"", "two numeric values (or two sets of numbers) are equal", "# Not used if num_prev_files == 0: reference_field = np.squeeze(reference_field)", "Testing helper functions ======================= Collection of helper functions for the", "composites importer = io.get_method(importer_name, \"importer\") reference_field, quality, metadata = io.read_timeseries(fns,", "import datetime import numpy as np import pytest import pysteps", "io.get_method(importer_name, \"importer\") reference_field, quality, metadata = io.read_timeseries(fns, importer, **importer_kwargs) del", "dimension # Convert to mm/h reference_field, metadata = stp.utils.to_rainrate(reference_field, metadata)", "the data [dBR] reference_field, metadata = stp.utils.dB_transform(reference_field, metadata, threshold=0.1, zerovalue=-15.0)", "assert that two numeric values (or two sets of numbers)", "else: # Compare numbers up to a certain precision assert", "time dimension # Convert to mm/h reference_field, metadata = stp.utils.to_rainrate(reference_field,", "\"importer\") reference_field, quality, metadata = io.read_timeseries(fns, importer, **importer_kwargs) del quality", "the archive to be used as reference.\"\"\" # Selected case", "files from the archive fns = io.archive.find_by_date(date, root_path, path_fmt, fn_pattern,", "by equality for non-numeric values, or by approximation otherwise. 
If", "from the archive to be used as reference.\"\"\" # Selected", "data_source[\"fn_ext\"] importer_name = data_source[\"importer\"] importer_kwargs = data_source[\"importer_kwargs\"] # Find the", "# Compare numbers up to a certain precision assert actual_value", "Assert by equality for non-numeric values, or by approximation otherwise.", "np.ma.masked_invalid(reference_field) # Log-transform the data [dBR] reference_field, metadata = stp.utils.dB_transform(reference_field,", "is None: assert actual_value == expected else: # Compare numbers", "the input files from the archive fns = io.archive.find_by_date(date, root_path,", "Log-transform the data [dBR] reference_field, metadata = stp.utils.dB_transform(reference_field, metadata, threshold=0.1,", "import io, rcparams def get_precipitation_fields(num_prev_files=0): \"\"\"Get a precipitation field from", "(or two sets of numbers) are equal to each other", "= data_source[\"importer\"] importer_kwargs = data_source[\"importer_kwargs\"] # Find the input files", "# Convert to mm/h reference_field, metadata = stp.utils.to_rainrate(reference_field, metadata) #", "functions ======================= Collection of helper functions for the testing suite.", "numbers up to a certain precision assert actual_value == pytest.approx(expected,", "actual_value == expected else: # Compare numbers up to a", "num_prev_files == 0: reference_field = np.squeeze(reference_field) # Remove time dimension", "as stp from pysteps import io, rcparams def get_precipitation_fields(num_prev_files=0): \"\"\"Get", "helper functions for the testing suite. \"\"\" from datetime import", "reference.\"\"\" # Selected case date = datetime.strptime(\"201505151630\", \"%Y%m%d%H%M\") data_source =", "values reference_field = np.ma.masked_invalid(reference_field) # Log-transform the data [dBR] reference_field,", "the precision keyword is None, assert by equality. When the", "functions for the testing suite. 
\"\"\" from datetime import datetime", "path_fmt, fn_pattern, fn_ext, timestep=5, num_prev_files=num_prev_files) # Read the radar composites", "the archive fns = io.archive.find_by_date(date, root_path, path_fmt, fn_pattern, fn_ext, timestep=5,", "is not None, assert that two numeric values (or two", "data_source[\"path_fmt\"] fn_pattern = data_source[\"fn_pattern\"] fn_ext = data_source[\"fn_ext\"] importer_name = data_source[\"importer\"]", "fn_ext = data_source[\"fn_ext\"] importer_name = data_source[\"importer\"] importer_kwargs = data_source[\"importer_kwargs\"] #", "datetime import numpy as np import pytest import pysteps as", "= data_source[\"root_path\"] path_fmt = data_source[\"path_fmt\"] fn_pattern = data_source[\"fn_pattern\"] fn_ext =", "np.squeeze(reference_field) # Remove time dimension # Convert to mm/h reference_field,", "stp.utils.dB_transform(reference_field, metadata, threshold=0.1, zerovalue=-15.0) return reference_field def smart_assert(actual_value, expected, tolerance=None):", "other within the tolerance. \"\"\" if tolerance is None: assert", "When the precision is not None, assert that two numeric", "= data_source[\"path_fmt\"] fn_pattern = data_source[\"fn_pattern\"] fn_ext = data_source[\"fn_ext\"] importer_name =", "None: assert actual_value == expected else: # Compare numbers up", "reference_field = np.squeeze(reference_field) # Remove time dimension # Convert to", "used as reference.\"\"\" # Selected case date = datetime.strptime(\"201505151630\", \"%Y%m%d%H%M\")", "that two numeric values (or two sets of numbers) are", "import pytest import pysteps as stp from pysteps import io,", "the radar composites importer = io.get_method(importer_name, \"importer\") reference_field, quality, metadata", "to each other within the tolerance. 
\"\"\" if tolerance is", "del quality # Not used if num_prev_files == 0: reference_field", "reference_field = np.ma.masked_invalid(reference_field) # Log-transform the data [dBR] reference_field, metadata", "keyword is None, assert by equality. When the precision is", "tolerance is None: assert actual_value == expected else: # Compare", "importer = io.get_method(importer_name, \"importer\") reference_field, quality, metadata = io.read_timeseries(fns, importer,", "be used as reference.\"\"\" # Selected case date = datetime.strptime(\"201505151630\",", "# Mask invalid values reference_field = np.ma.masked_invalid(reference_field) # Log-transform the", "= io.archive.find_by_date(date, root_path, path_fmt, fn_pattern, fn_ext, timestep=5, num_prev_files=num_prev_files) # Read", "mm/h reference_field, metadata = stp.utils.to_rainrate(reference_field, metadata) # Mask invalid values", "from pysteps import io, rcparams def get_precipitation_fields(num_prev_files=0): \"\"\"Get a precipitation", "fn_pattern = data_source[\"fn_pattern\"] fn_ext = data_source[\"fn_ext\"] importer_name = data_source[\"importer\"] importer_kwargs", "not None, assert that two numeric values (or two sets", "each other within the tolerance. 
\"\"\" if tolerance is None:", "importer_name = data_source[\"importer\"] importer_kwargs = data_source[\"importer_kwargs\"] # Find the input", "if num_prev_files == 0: reference_field = np.squeeze(reference_field) # Remove time", "from the archive fns = io.archive.find_by_date(date, root_path, path_fmt, fn_pattern, fn_ext,", "precision is not None, assert that two numeric values (or", "Convert to mm/h reference_field, metadata = stp.utils.to_rainrate(reference_field, metadata) # Mask", "radar composites importer = io.get_method(importer_name, \"importer\") reference_field, quality, metadata =", "smart_assert(actual_value, expected, tolerance=None): \"\"\" Assert by equality for non-numeric values,", "assert actual_value == expected else: # Compare numbers up to", "io, rcparams def get_precipitation_fields(num_prev_files=0): \"\"\"Get a precipitation field from the", "= np.ma.masked_invalid(reference_field) # Log-transform the data [dBR] reference_field, metadata =", "is None, assert by equality. When the precision is not", "fn_ext, timestep=5, num_prev_files=num_prev_files) # Read the radar composites importer =", "metadata) # Mask invalid values reference_field = np.ma.masked_invalid(reference_field) # Log-transform", "values (or two sets of numbers) are equal to each", "assert by equality. When the precision is not None, assert", "0: reference_field = np.squeeze(reference_field) # Remove time dimension # Convert", "two sets of numbers) are equal to each other within", "a precipitation field from the archive to be used as", "pytest import pysteps as stp from pysteps import io, rcparams", "metadata = stp.utils.dB_transform(reference_field, metadata, threshold=0.1, zerovalue=-15.0) return reference_field def smart_assert(actual_value,", "for the testing suite. 
\"\"\" from datetime import datetime import", "numpy as np import pytest import pysteps as stp from", "= data_source[\"importer_kwargs\"] # Find the input files from the archive", "data_source[\"importer_kwargs\"] # Find the input files from the archive fns", "Not used if num_prev_files == 0: reference_field = np.squeeze(reference_field) #", "\"%Y%m%d%H%M\") data_source = rcparams.data_sources[\"mch\"] root_path = data_source[\"root_path\"] path_fmt = data_source[\"path_fmt\"]", "== 0: reference_field = np.squeeze(reference_field) # Remove time dimension #", "\"\"\" from datetime import datetime import numpy as np import", "\"\"\" if tolerance is None: assert actual_value == expected else:", "io.archive.find_by_date(date, root_path, path_fmt, fn_pattern, fn_ext, timestep=5, num_prev_files=num_prev_files) # Read the", "expected else: # Compare numbers up to a certain precision", "root_path, path_fmt, fn_pattern, fn_ext, timestep=5, num_prev_files=num_prev_files) # Read the radar", "Remove time dimension # Convert to mm/h reference_field, metadata =", "Compare numbers up to a certain precision assert actual_value ==", "zerovalue=-15.0) return reference_field def smart_assert(actual_value, expected, tolerance=None): \"\"\" Assert by", "= io.read_timeseries(fns, importer, **importer_kwargs) del quality # Not used if", "\"\"\" Testing helper functions ======================= Collection of helper functions for", "root_path = data_source[\"root_path\"] path_fmt = data_source[\"path_fmt\"] fn_pattern = data_source[\"fn_pattern\"] fn_ext", "= np.squeeze(reference_field) # Remove time dimension # Convert to mm/h", "fn_pattern, fn_ext, timestep=5, num_prev_files=num_prev_files) # Read the radar composites importer", "np import pytest import pysteps as stp from pysteps import" ]
[ "('/unit', lessons.UnitHandler)] global custom_module # pylint: disable=global-statement custom_module = custom_modules.Module(", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "# # Licensed under the Apache License, Version 2.0 (the", "compliance with the License. # You may obtain a copy", "All_LOCALES_DESCRIPTION = 'Can pick all locales, including unavailable ones.' SEE_DRAFTS_PERMISSION", "All Rights Reserved. # # Licensed under the Apache License,", "2.0 (the \"License\"); # you may not use this file", "file except in compliance with the License. # You may", "agreed to in writing, software # distributed under the License", "Unless required by applicable law or agreed to in writing,", "lessons from controllers import utils from models import content from", "from models import resources_display from models import custom_modules from models", "the License is distributed on an \"AS-IS\" BASIS, # WITHOUT", "('/activity', lessons.UnitHandler), ('/answer', assessments.AnswerHandler), ('/assessment', lessons.AssessmentHandler), ('/course', lessons.CourseHandler), ('/forum', utils.ForumHandler),", "utils.StudentEditStudentHandler), ('/student/settracks', utils.StudentSetTracksHandler), ('/student/home', utils.StudentProfileHandler), ('/student/unenroll', utils.StudentUnenrollHandler), ('/unit', lessons.UnitHandler)] global", "from models import roles from tools import verify All_LOCALES_PERMISSION =", "app_context, custom_module, All_LOCALES_PERMISSION) def can_see_drafts(app_context): return roles.Roles.is_user_allowed( app_context, custom_module, SEE_DRAFTS_PERMISSION)", "the registry.\"\"\" def on_module_enabled(): roles.Roles.register_permissions(custom_module, permissions_callback) resource.Registry.register(resources_display.ResourceCourseSettings) resource.Registry.register(resources_display.ResourceUnit) resource.Registry.register(resources_display.ResourceAssessment) 
resource.Registry.register(resources_display.ResourceLink)", "('/preview', utils.PreviewHandler), ('/register', utils.RegisterHandler), ('/resources', utils.ResourcesHandler), ('/rest/locale', utils.StudentLocaleRESTHandler), ('/review', lessons.ReviewHandler),", "Inc. All Rights Reserved. # # Licensed under the Apache", "\"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "import resources_display from models import custom_modules from models import roles", "import content from models import resources_display from models import custom_modules", "from tools import verify All_LOCALES_PERMISSION = 'can_pick_all_locales' All_LOCALES_DESCRIPTION = 'Can", "draft status.' custom_module = None def can_pick_all_locales(app_context): return roles.Roles.is_user_allowed( app_context,", "the specific language governing permissions and # limitations under the", "parser to verify verify.parse_content = content.parse_string_in_scope # setup routes courses_routes", "content.parse_string_in_scope # setup routes courses_routes = [ ('/', lessons.CourseHandler), ('/activity',", "models import resources_display from models import custom_modules from models import", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "resource.Registry.register(resources_display.ResourceLesson) resource.Registry.register(utils.ResourceHtmlHook) def permissions_callback(unused_app_context): return [ roles.Permission(All_LOCALES_PERMISSION, All_LOCALES_DESCRIPTION), roles.Permission(SEE_DRAFTS_PERMISSION, SEE_DRAFTS_DESCRIPTION)", "[ roles.Permission(All_LOCALES_PERMISSION, All_LOCALES_DESCRIPTION), roles.Permission(SEE_DRAFTS_PERMISSION, SEE_DRAFTS_DESCRIPTION) ] # provide parser to", "models import content from models import resources_display from models import", "express or implied. # See the License for the specific", "applicable law or agreed to in writing, software # distributed", "permissions and # limitations under the License. 
\"\"\"Courses module.\"\"\" __author__", "routes courses_routes = [ ('/', lessons.CourseHandler), ('/activity', lessons.UnitHandler), ('/answer', assessments.AnswerHandler),", "except in compliance with the License. # You may obtain", "for delivering an online course.', [], courses_routes, notify_module_enabled=on_module_enabled) return custom_module", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "('/resources', utils.ResourcesHandler), ('/rest/locale', utils.StudentLocaleRESTHandler), ('/review', lessons.ReviewHandler), ('/reviewdashboard', lessons.ReviewDashboardHandler), ('/student/editstudent', utils.StudentEditStudentHandler),", "Copyright 2012 Google Inc. All Rights Reserved. # # Licensed", "pick all locales, including unavailable ones.' SEE_DRAFTS_PERMISSION = 'can_see_draft_content' SEE_DRAFTS_DESCRIPTION", "assessments with draft status.' custom_module = None def can_pick_all_locales(app_context): return", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "verify.parse_content = content.parse_string_in_scope # setup routes courses_routes = [ ('/',", "('/assessment', lessons.AssessmentHandler), ('/course', lessons.CourseHandler), ('/forum', utils.ForumHandler), ('/preview', utils.PreviewHandler), ('/register', utils.RegisterHandler),", "not use this file except in compliance with the License.", "lessons.ReviewDashboardHandler), ('/student/editstudent', utils.StudentEditStudentHandler), ('/student/settracks', utils.StudentSetTracksHandler), ('/student/home', utils.StudentProfileHandler), ('/student/unenroll', utils.StudentUnenrollHandler), ('/unit',", "# limitations under the License. \"\"\"Courses module.\"\"\" __author__ = '<NAME>", "this module in the registry.\"\"\" def on_module_enabled(): roles.Roles.register_permissions(custom_module, permissions_callback) resource.Registry.register(resources_display.ResourceCourseSettings)", "the License. 
\"\"\"Courses module.\"\"\" __author__ = '<NAME> (<EMAIL>)' from common", "All_LOCALES_PERMISSION = 'can_pick_all_locales' All_LOCALES_DESCRIPTION = 'Can pick all locales, including", "writing, software # distributed under the License is distributed on", "All_LOCALES_PERMISSION) def can_see_drafts(app_context): return roles.Roles.is_user_allowed( app_context, custom_module, SEE_DRAFTS_PERMISSION) def register_module():", "SEE_DRAFTS_PERMISSION = 'can_see_draft_content' SEE_DRAFTS_DESCRIPTION = 'Can see lessons and assessments", "in writing, software # distributed under the License is distributed", "lessons.AssessmentHandler), ('/course', lessons.CourseHandler), ('/forum', utils.ForumHandler), ('/preview', utils.PreviewHandler), ('/register', utils.RegisterHandler), ('/resources',", "Google Inc. All Rights Reserved. # # Licensed under the", "you may not use this file except in compliance with", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "def permissions_callback(unused_app_context): return [ roles.Permission(All_LOCALES_PERMISSION, All_LOCALES_DESCRIPTION), roles.Permission(SEE_DRAFTS_PERMISSION, SEE_DRAFTS_DESCRIPTION) ] #", "unavailable ones.' 
SEE_DRAFTS_PERMISSION = 'can_see_draft_content' SEE_DRAFTS_DESCRIPTION = 'Can see lessons", "utils from models import content from models import resources_display from", "SEE_DRAFTS_PERMISSION) def register_module(): \"\"\"Registers this module in the registry.\"\"\" def", "('/register', utils.RegisterHandler), ('/resources', utils.ResourcesHandler), ('/rest/locale', utils.StudentLocaleRESTHandler), ('/review', lessons.ReviewHandler), ('/reviewdashboard', lessons.ReviewDashboardHandler),", "('/answer', assessments.AnswerHandler), ('/assessment', lessons.AssessmentHandler), ('/course', lessons.CourseHandler), ('/forum', utils.ForumHandler), ('/preview', utils.PreviewHandler),", "def can_pick_all_locales(app_context): return roles.Roles.is_user_allowed( app_context, custom_module, All_LOCALES_PERMISSION) def can_see_drafts(app_context): return", "License. \"\"\"Courses module.\"\"\" __author__ = '<NAME> (<EMAIL>)' from common import", "def can_see_drafts(app_context): return roles.Roles.is_user_allowed( app_context, custom_module, SEE_DRAFTS_PERMISSION) def register_module(): \"\"\"Registers", "# provide parser to verify verify.parse_content = content.parse_string_in_scope # setup", "language governing permissions and # limitations under the License. \"\"\"Courses", "= '<NAME> (<EMAIL>)' from common import resource from controllers import", "controllers import lessons from controllers import utils from models import", "can_see_drafts(app_context): return roles.Roles.is_user_allowed( app_context, custom_module, SEE_DRAFTS_PERMISSION) def register_module(): \"\"\"Registers this", "custom_module, SEE_DRAFTS_PERMISSION) def register_module(): \"\"\"Registers this module in the registry.\"\"\"", "use this file except in compliance with the License. #", "limitations under the License. 
\"\"\"Courses module.\"\"\" __author__ = '<NAME> (<EMAIL>)'", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "return [ roles.Permission(All_LOCALES_PERMISSION, All_LOCALES_DESCRIPTION), roles.Permission(SEE_DRAFTS_PERMISSION, SEE_DRAFTS_DESCRIPTION) ] # provide parser", "utils.ForumHandler), ('/preview', utils.PreviewHandler), ('/register', utils.RegisterHandler), ('/resources', utils.ResourcesHandler), ('/rest/locale', utils.StudentLocaleRESTHandler), ('/review',", "module.\"\"\" __author__ = '<NAME> (<EMAIL>)' from common import resource from", "import roles from tools import verify All_LOCALES_PERMISSION = 'can_pick_all_locales' All_LOCALES_DESCRIPTION", "with draft status.' custom_module = None def can_pick_all_locales(app_context): return roles.Roles.is_user_allowed(", "set of pages for delivering an online course.', [], courses_routes,", "assessments from controllers import lessons from controllers import utils from", "under the License is distributed on an \"AS-IS\" BASIS, #", "utils.RegisterHandler), ('/resources', utils.ResourcesHandler), ('/rest/locale', utils.StudentLocaleRESTHandler), ('/review', lessons.ReviewHandler), ('/reviewdashboard', lessons.ReviewDashboardHandler), ('/student/editstudent',", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "= [ ('/', lessons.CourseHandler), ('/activity', lessons.UnitHandler), ('/answer', assessments.AnswerHandler), ('/assessment', lessons.AssessmentHandler),", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "from controllers import lessons from controllers import utils from models", "lessons.CourseHandler), ('/activity', lessons.UnitHandler), ('/answer', assessments.AnswerHandler), ('/assessment', lessons.AssessmentHandler), ('/course', lessons.CourseHandler), ('/forum',", "module in the registry.\"\"\" def on_module_enabled(): roles.Roles.register_permissions(custom_module, permissions_callback) resource.Registry.register(resources_display.ResourceCourseSettings) resource.Registry.register(resources_display.ResourceUnit)", "or implied. # See the License for the specific language", "('/', lessons.CourseHandler), ('/activity', lessons.UnitHandler), ('/answer', assessments.AnswerHandler), ('/assessment', lessons.AssessmentHandler), ('/course', lessons.CourseHandler),", "Rights Reserved. # # Licensed under the Apache License, Version", "('/review', lessons.ReviewHandler), ('/reviewdashboard', lessons.ReviewDashboardHandler), ('/student/editstudent', utils.StudentEditStudentHandler), ('/student/settracks', utils.StudentSetTracksHandler), ('/student/home', utils.StudentProfileHandler),", "License. # You may obtain a copy of the License", "from models import custom_modules from models import roles from tools", "License, Version 2.0 (the \"License\"); # you may not use", "# You may obtain a copy of the License at", "from models import content from models import resources_display from models", "KIND, either express or implied. 
# See the License for", "specific language governing permissions and # limitations under the License.", "custom_modules from models import roles from tools import verify All_LOCALES_PERMISSION", "roles from tools import verify All_LOCALES_PERMISSION = 'can_pick_all_locales' All_LOCALES_DESCRIPTION =", "permissions_callback(unused_app_context): return [ roles.Permission(All_LOCALES_PERMISSION, All_LOCALES_DESCRIPTION), roles.Permission(SEE_DRAFTS_PERMISSION, SEE_DRAFTS_DESCRIPTION) ] # provide", "SEE_DRAFTS_DESCRIPTION = 'Can see lessons and assessments with draft status.'", "ones.' SEE_DRAFTS_PERMISSION = 'can_see_draft_content' SEE_DRAFTS_DESCRIPTION = 'Can see lessons and", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "see lessons and assessments with draft status.' custom_module = None", "app_context, custom_module, SEE_DRAFTS_PERMISSION) def register_module(): \"\"\"Registers this module in the", "License for the specific language governing permissions and # limitations", "custom_modules.Module( 'Course', 'A set of pages for delivering an online", "'Can pick all locales, including unavailable ones.' SEE_DRAFTS_PERMISSION = 'can_see_draft_content'", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "resource.Registry.register(utils.ResourceHtmlHook) def permissions_callback(unused_app_context): return [ roles.Permission(All_LOCALES_PERMISSION, All_LOCALES_DESCRIPTION), roles.Permission(SEE_DRAFTS_PERMISSION, SEE_DRAFTS_DESCRIPTION) ]", "utils.ResourcesHandler), ('/rest/locale', utils.StudentLocaleRESTHandler), ('/review', lessons.ReviewHandler), ('/reviewdashboard', lessons.ReviewDashboardHandler), ('/student/editstudent', utils.StudentEditStudentHandler), ('/student/settracks',", "# Copyright 2012 Google Inc. All Rights Reserved. # #", "Reserved. 
# # Licensed under the Apache License, Version 2.0", "# pylint: disable=global-statement custom_module = custom_modules.Module( 'Course', 'A set of", "('/forum', utils.ForumHandler), ('/preview', utils.PreviewHandler), ('/register', utils.RegisterHandler), ('/resources', utils.ResourcesHandler), ('/rest/locale', utils.StudentLocaleRESTHandler),", "import lessons from controllers import utils from models import content", "'can_see_draft_content' SEE_DRAFTS_DESCRIPTION = 'Can see lessons and assessments with draft", "of pages for delivering an online course.', [], courses_routes, notify_module_enabled=on_module_enabled)", "roles.Roles.register_permissions(custom_module, permissions_callback) resource.Registry.register(resources_display.ResourceCourseSettings) resource.Registry.register(resources_display.ResourceUnit) resource.Registry.register(resources_display.ResourceAssessment) resource.Registry.register(resources_display.ResourceLink) resource.Registry.register(resources_display.ResourceLesson) resource.Registry.register(utils.ResourceHtmlHook) def permissions_callback(unused_app_context):", "custom_module = None def can_pick_all_locales(app_context): return roles.Roles.is_user_allowed( app_context, custom_module, All_LOCALES_PERMISSION)", "models import custom_modules from models import roles from tools import", "on_module_enabled(): roles.Roles.register_permissions(custom_module, permissions_callback) resource.Registry.register(resources_display.ResourceCourseSettings) resource.Registry.register(resources_display.ResourceUnit) resource.Registry.register(resources_display.ResourceAssessment) resource.Registry.register(resources_display.ResourceLink) resource.Registry.register(resources_display.ResourceLesson) resource.Registry.register(utils.ResourceHtmlHook) def", "All_LOCALES_DESCRIPTION), roles.Permission(SEE_DRAFTS_PERMISSION, SEE_DRAFTS_DESCRIPTION) ] # provide parser to verify verify.parse_content", "the License for the specific language governing 
permissions and #", "\"\"\"Courses module.\"\"\" __author__ = '<NAME> (<EMAIL>)' from common import resource", "(<EMAIL>)' from common import resource from controllers import assessments from", "(the \"License\"); # you may not use this file except", "from controllers import assessments from controllers import lessons from controllers", "import custom_modules from models import roles from tools import verify", "Apache License, Version 2.0 (the \"License\"); # you may not", "\"\"\"Registers this module in the registry.\"\"\" def on_module_enabled(): roles.Roles.register_permissions(custom_module, permissions_callback)", "verify verify.parse_content = content.parse_string_in_scope # setup routes courses_routes = [", "# you may not use this file except in compliance", "either express or implied. # See the License for the", "resources_display from models import custom_modules from models import roles from", "= 'Can pick all locales, including unavailable ones.' SEE_DRAFTS_PERMISSION =", "utils.PreviewHandler), ('/register', utils.RegisterHandler), ('/resources', utils.ResourcesHandler), ('/rest/locale', utils.StudentLocaleRESTHandler), ('/review', lessons.ReviewHandler), ('/reviewdashboard',", "verify All_LOCALES_PERMISSION = 'can_pick_all_locales' All_LOCALES_DESCRIPTION = 'Can pick all locales,", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "distributed under the License is distributed on an \"AS-IS\" BASIS,", "from common import resource from controllers import assessments from controllers", "resource.Registry.register(resources_display.ResourceCourseSettings) resource.Registry.register(resources_display.ResourceUnit) resource.Registry.register(resources_display.ResourceAssessment) resource.Registry.register(resources_display.ResourceLink) resource.Registry.register(resources_display.ResourceLesson) resource.Registry.register(utils.ResourceHtmlHook) def permissions_callback(unused_app_context): return [", "# setup routes courses_routes = [ ('/', lessons.CourseHandler), ('/activity', lessons.UnitHandler),", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "import resource from controllers import assessments from controllers import lessons", "status.' custom_module = None def can_pick_all_locales(app_context): return roles.Roles.is_user_allowed( app_context, custom_module,", "pylint: disable=global-statement custom_module = custom_modules.Module( 'Course', 'A set of pages", "custom_module = custom_modules.Module( 'Course', 'A set of pages for delivering", "global custom_module # pylint: disable=global-statement custom_module = custom_modules.Module( 'Course', 'A", "in compliance with the License. 
# You may obtain a", "import verify All_LOCALES_PERMISSION = 'can_pick_all_locales' All_LOCALES_DESCRIPTION = 'Can pick all", "'can_pick_all_locales' All_LOCALES_DESCRIPTION = 'Can pick all locales, including unavailable ones.'", "custom_module # pylint: disable=global-statement custom_module = custom_modules.Module( 'Course', 'A set", "software # distributed under the License is distributed on an", "SEE_DRAFTS_DESCRIPTION) ] # provide parser to verify verify.parse_content = content.parse_string_in_scope", "('/student/unenroll', utils.StudentUnenrollHandler), ('/unit', lessons.UnitHandler)] global custom_module # pylint: disable=global-statement custom_module", "lessons.UnitHandler), ('/answer', assessments.AnswerHandler), ('/assessment', lessons.AssessmentHandler), ('/course', lessons.CourseHandler), ('/forum', utils.ForumHandler), ('/preview',", "# # Unless required by applicable law or agreed to", "distributed on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "utils.StudentLocaleRESTHandler), ('/review', lessons.ReviewHandler), ('/reviewdashboard', lessons.ReviewDashboardHandler), ('/student/editstudent', utils.StudentEditStudentHandler), ('/student/settracks', utils.StudentSetTracksHandler), ('/student/home',", "to verify verify.parse_content = content.parse_string_in_scope # setup routes courses_routes =", "'A set of pages for delivering an online course.', [],", "return roles.Roles.is_user_allowed( app_context, custom_module, All_LOCALES_PERMISSION) def can_see_drafts(app_context): return roles.Roles.is_user_allowed( app_context,", "('/rest/locale', utils.StudentLocaleRESTHandler), ('/review', lessons.ReviewHandler), ('/reviewdashboard', lessons.ReviewDashboardHandler), ('/student/editstudent', utils.StudentEditStudentHandler), ('/student/settracks', utils.StudentSetTracksHandler),", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "def 
register_module(): \"\"\"Registers this module in the registry.\"\"\" def on_module_enabled():", "roles.Permission(SEE_DRAFTS_PERMISSION, SEE_DRAFTS_DESCRIPTION) ] # provide parser to verify verify.parse_content =", "] # provide parser to verify verify.parse_content = content.parse_string_in_scope #", "('/student/home', utils.StudentProfileHandler), ('/student/unenroll', utils.StudentUnenrollHandler), ('/unit', lessons.UnitHandler)] global custom_module # pylint:", "content from models import resources_display from models import custom_modules from", "resource.Registry.register(resources_display.ResourceAssessment) resource.Registry.register(resources_display.ResourceLink) resource.Registry.register(resources_display.ResourceLesson) resource.Registry.register(utils.ResourceHtmlHook) def permissions_callback(unused_app_context): return [ roles.Permission(All_LOCALES_PERMISSION, All_LOCALES_DESCRIPTION),", "Version 2.0 (the \"License\"); # you may not use this", "tools import verify All_LOCALES_PERMISSION = 'can_pick_all_locales' All_LOCALES_DESCRIPTION = 'Can pick", "and assessments with draft status.' 
custom_module = None def can_pick_all_locales(app_context):", "= 'can_see_draft_content' SEE_DRAFTS_DESCRIPTION = 'Can see lessons and assessments with", "roles.Roles.is_user_allowed( app_context, custom_module, All_LOCALES_PERMISSION) def can_see_drafts(app_context): return roles.Roles.is_user_allowed( app_context, custom_module,", "__author__ = '<NAME> (<EMAIL>)' from common import resource from controllers", "disable=global-statement custom_module = custom_modules.Module( 'Course', 'A set of pages for", "custom_module, All_LOCALES_PERMISSION) def can_see_drafts(app_context): return roles.Roles.is_user_allowed( app_context, custom_module, SEE_DRAFTS_PERMISSION) def", "law or agreed to in writing, software # distributed under", "# distributed under the License is distributed on an \"AS-IS\"", "controllers import utils from models import content from models import", "= 'Can see lessons and assessments with draft status.' custom_module", "('/reviewdashboard', lessons.ReviewDashboardHandler), ('/student/editstudent', utils.StudentEditStudentHandler), ('/student/settracks', utils.StudentSetTracksHandler), ('/student/home', utils.StudentProfileHandler), ('/student/unenroll', utils.StudentUnenrollHandler),", "import assessments from controllers import lessons from controllers import utils", "utils.StudentUnenrollHandler), ('/unit', lessons.UnitHandler)] global custom_module # pylint: disable=global-statement custom_module =", "lessons.UnitHandler)] global custom_module # pylint: disable=global-statement custom_module = custom_modules.Module( 'Course',", "None def can_pick_all_locales(app_context): return roles.Roles.is_user_allowed( app_context, custom_module, All_LOCALES_PERMISSION) def can_see_drafts(app_context):", "permissions_callback) resource.Registry.register(resources_display.ResourceCourseSettings) resource.Registry.register(resources_display.ResourceUnit) resource.Registry.register(resources_display.ResourceAssessment) 
resource.Registry.register(resources_display.ResourceLink) resource.Registry.register(resources_display.ResourceLesson) resource.Registry.register(utils.ResourceHtmlHook) def permissions_callback(unused_app_context): return", "= custom_modules.Module( 'Course', 'A set of pages for delivering an", "resource from controllers import assessments from controllers import lessons from", "roles.Permission(All_LOCALES_PERMISSION, All_LOCALES_DESCRIPTION), roles.Permission(SEE_DRAFTS_PERMISSION, SEE_DRAFTS_DESCRIPTION) ] # provide parser to verify", "an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "implied. # See the License for the specific language governing", "= None def can_pick_all_locales(app_context): return roles.Roles.is_user_allowed( app_context, custom_module, All_LOCALES_PERMISSION) def", "utils.StudentSetTracksHandler), ('/student/home', utils.StudentProfileHandler), ('/student/unenroll', utils.StudentUnenrollHandler), ('/unit', lessons.UnitHandler)] global custom_module #", "is distributed on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES OR", "under the Apache License, Version 2.0 (the \"License\"); # you", "\"License\"); # you may not use this file except in", "locales, including unavailable ones.' 
SEE_DRAFTS_PERMISSION = 'can_see_draft_content' SEE_DRAFTS_DESCRIPTION = 'Can", "('/student/editstudent', utils.StudentEditStudentHandler), ('/student/settracks', utils.StudentSetTracksHandler), ('/student/home', utils.StudentProfileHandler), ('/student/unenroll', utils.StudentUnenrollHandler), ('/unit', lessons.UnitHandler)]", "in the registry.\"\"\" def on_module_enabled(): roles.Roles.register_permissions(custom_module, permissions_callback) resource.Registry.register(resources_display.ResourceCourseSettings) resource.Registry.register(resources_display.ResourceUnit) resource.Registry.register(resources_display.ResourceAssessment)", "return roles.Roles.is_user_allowed( app_context, custom_module, SEE_DRAFTS_PERMISSION) def register_module(): \"\"\"Registers this module", "= 'can_pick_all_locales' All_LOCALES_DESCRIPTION = 'Can pick all locales, including unavailable", "governing permissions and # limitations under the License. \"\"\"Courses module.\"\"\"", "courses_routes = [ ('/', lessons.CourseHandler), ('/activity', lessons.UnitHandler), ('/answer', assessments.AnswerHandler), ('/assessment',", "('/course', lessons.CourseHandler), ('/forum', utils.ForumHandler), ('/preview', utils.PreviewHandler), ('/register', utils.RegisterHandler), ('/resources', utils.ResourcesHandler),", "models import roles from tools import verify All_LOCALES_PERMISSION = 'can_pick_all_locales'", "assessments.AnswerHandler), ('/assessment', lessons.AssessmentHandler), ('/course', lessons.CourseHandler), ('/forum', utils.ForumHandler), ('/preview', utils.PreviewHandler), ('/register',", "import utils from models import content from models import resources_display", "def on_module_enabled(): roles.Roles.register_permissions(custom_module, permissions_callback) resource.Registry.register(resources_display.ResourceCourseSettings) resource.Registry.register(resources_display.ResourceUnit) resource.Registry.register(resources_display.ResourceAssessment) 
resource.Registry.register(resources_display.ResourceLink) resource.Registry.register(resources_display.ResourceLesson) resource.Registry.register(utils.ResourceHtmlHook)", "by applicable law or agreed to in writing, software #", "<reponame>ehiller/mobilecsp-v18<filename>modules/courses/courses.py # Copyright 2012 Google Inc. All Rights Reserved. #", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "OF ANY KIND, either express or implied. # See the", "'<NAME> (<EMAIL>)' from common import resource from controllers import assessments", "setup routes courses_routes = [ ('/', lessons.CourseHandler), ('/activity', lessons.UnitHandler), ('/answer',", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "registry.\"\"\" def on_module_enabled(): roles.Roles.register_permissions(custom_module, permissions_callback) resource.Registry.register(resources_display.ResourceCourseSettings) resource.Registry.register(resources_display.ResourceUnit) resource.Registry.register(resources_display.ResourceAssessment) resource.Registry.register(resources_display.ResourceLink) resource.Registry.register(resources_display.ResourceLesson)", "lessons.CourseHandler), ('/forum', utils.ForumHandler), ('/preview', utils.PreviewHandler), ('/register', utils.RegisterHandler), ('/resources', utils.ResourcesHandler), ('/rest/locale',", "'Course', 'A set of pages for delivering an online course.',", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "register_module(): \"\"\"Registers this module in the registry.\"\"\" def on_module_enabled(): roles.Roles.register_permissions(custom_module,", "can_pick_all_locales(app_context): return roles.Roles.is_user_allowed( app_context, custom_module, All_LOCALES_PERMISSION) def can_see_drafts(app_context): return roles.Roles.is_user_allowed(", "('/student/settracks', utils.StudentSetTracksHandler), ('/student/home', utils.StudentProfileHandler), ('/student/unenroll', utils.StudentUnenrollHandler), ('/unit', lessons.UnitHandler)] global custom_module", "to in writing, software # distributed under the License is", "lessons and assessments with draft status.' custom_module = None def", "# See the License for the specific language governing permissions", "from controllers import utils from models import content from models", "You may obtain a copy of the License at #", "resource.Registry.register(resources_display.ResourceLink) resource.Registry.register(resources_display.ResourceLesson) resource.Registry.register(utils.ResourceHtmlHook) def permissions_callback(unused_app_context): return [ roles.Permission(All_LOCALES_PERMISSION, All_LOCALES_DESCRIPTION), roles.Permission(SEE_DRAFTS_PERMISSION,", "lessons.ReviewHandler), ('/reviewdashboard', lessons.ReviewDashboardHandler), ('/student/editstudent', utils.StudentEditStudentHandler), ('/student/settracks', utils.StudentSetTracksHandler), ('/student/home', utils.StudentProfileHandler), ('/student/unenroll',", "may not use this file except in compliance with the", "or agreed to in writing, software # distributed under the", "under the License. \"\"\"Courses module.\"\"\" __author__ = '<NAME> (<EMAIL>)' from", "and # limitations under the License. 
\"\"\"Courses module.\"\"\" __author__ =", "required by applicable law or agreed to in writing, software", "utils.StudentProfileHandler), ('/student/unenroll', utils.StudentUnenrollHandler), ('/unit', lessons.UnitHandler)] global custom_module # pylint: disable=global-statement", "resource.Registry.register(resources_display.ResourceUnit) resource.Registry.register(resources_display.ResourceAssessment) resource.Registry.register(resources_display.ResourceLink) resource.Registry.register(resources_display.ResourceLesson) resource.Registry.register(utils.ResourceHtmlHook) def permissions_callback(unused_app_context): return [ roles.Permission(All_LOCALES_PERMISSION,", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "controllers import assessments from controllers import lessons from controllers import", "common import resource from controllers import assessments from controllers import", "with the License. # You may obtain a copy of", "this file except in compliance with the License. # You", "[ ('/', lessons.CourseHandler), ('/activity', lessons.UnitHandler), ('/answer', assessments.AnswerHandler), ('/assessment', lessons.AssessmentHandler), ('/course',", "License is distributed on an \"AS-IS\" BASIS, # WITHOUT WARRANTIES", "the Apache License, Version 2.0 (the \"License\"); # you may", "all locales, including unavailable ones.' SEE_DRAFTS_PERMISSION = 'can_see_draft_content' SEE_DRAFTS_DESCRIPTION =", "including unavailable ones.' SEE_DRAFTS_PERMISSION = 'can_see_draft_content' SEE_DRAFTS_DESCRIPTION = 'Can see", "'Can see lessons and assessments with draft status.' custom_module =", "= content.parse_string_in_scope # setup routes courses_routes = [ ('/', lessons.CourseHandler),", "roles.Roles.is_user_allowed( app_context, custom_module, SEE_DRAFTS_PERMISSION) def register_module(): \"\"\"Registers this module in", "2012 Google Inc. All Rights Reserved. 
# # Licensed under", "pages for delivering an online course.', [], courses_routes, notify_module_enabled=on_module_enabled) return", "provide parser to verify verify.parse_content = content.parse_string_in_scope # setup routes" ]
[ "class PrefixLayout(merlin.protocol, family=\"merlin.layouts.prefix\"): \"\"\" The manager of the all build", "used by other packages\" share = merlin.properties.path() share.doc = \"architecture", "\"\"\" The manager of the all build products, both final", "= \"architecture independent package files\" var = merlin.properties.path() var.doc =", "independent package files\" var = merlin.properties.path() var.doc = \"runtime files\"", "state bin = merlin.properties.path() bin.doc = \"the location of executables\"", "to be used by other packages\" share = merlin.properties.path() share.doc", "implementation \"\"\" # choose the default implementer return merlin.components.fhs #", "files\" doc = merlin.properties.path() doc.doc = \"package documentation\" etc =", "\"runtime files\" # framework hooks @classmethod def pyre_default(cls, **kwds): \"\"\"", "disposables \"\"\" # required state bin = merlin.properties.path() bin.doc =", "files\" # framework hooks @classmethod def pyre_default(cls, **kwds): \"\"\" Specify", "choose the default implementer return merlin.components.fhs # end of file", "merlin.properties.path() doc.doc = \"package documentation\" etc = merlin.properties.path() etc.doc =", "build products class PrefixLayout(merlin.protocol, family=\"merlin.layouts.prefix\"): \"\"\" The manager of the", "<NAME> <<EMAIL>> # (c) 1998-2021 all rights reserved # support", "include.doc = \"library header files\" lib = merlin.properties.path() lib.doc =", "config.doc = \"global package configuration files\" doc = merlin.properties.path() doc.doc", "# # <NAME> <<EMAIL>> # (c) 1998-2021 all rights reserved", "the manager of intermediate and final build products class PrefixLayout(merlin.protocol,", "header files\" lib = merlin.properties.path() lib.doc = \"libraries\" libexec =", "intermediate disposables \"\"\" # required state bin = merlin.properties.path() bin.doc", "\"global package configuration files\" doc = merlin.properties.path() doc.doc = \"package", 
"merlin.properties.path() etc.doc = \"host specific files\" include = merlin.properties.path() include.doc", "<gh_stars>10-100 # -*- coding: utf-8 -*- # # <NAME> <<EMAIL>>", "1998-2021 all rights reserved # support import merlin # the", "etc.doc = \"host specific files\" include = merlin.properties.path() include.doc =", "specific files\" include = merlin.properties.path() include.doc = \"library header files\"", "\"library header files\" lib = merlin.properties.path() lib.doc = \"libraries\" libexec", "merlin.properties.path() share.doc = \"architecture independent package files\" var = merlin.properties.path()", "intermediate and final build products class PrefixLayout(merlin.protocol, family=\"merlin.layouts.prefix\"): \"\"\" The", "def pyre_default(cls, **kwds): \"\"\" Specify the default implementation \"\"\" #", "configuration files\" doc = merlin.properties.path() doc.doc = \"package documentation\" etc", "all rights reserved # support import merlin # the manager", "\"host specific files\" include = merlin.properties.path() include.doc = \"library header", "# <NAME> <<EMAIL>> # (c) 1998-2021 all rights reserved #", "merlin.properties.path() include.doc = \"library header files\" lib = merlin.properties.path() lib.doc", "of the all build products, both final and intermediate disposables", "# the manager of intermediate and final build products class", "and final build products class PrefixLayout(merlin.protocol, family=\"merlin.layouts.prefix\"): \"\"\" The manager", "merlin.properties.path() libexec.doc = \"binaries that are meant to be used", "final build products class PrefixLayout(merlin.protocol, family=\"merlin.layouts.prefix\"): \"\"\" The manager of", "# (c) 1998-2021 all rights reserved # support import merlin", "merlin.properties.path() bin.doc = \"the location of executables\" config = merlin.properties.path()", "build products, both final and intermediate disposables \"\"\" # required", "location of executables\" config = merlin.properties.path() 
config.doc = \"global package", "= merlin.properties.path() lib.doc = \"libraries\" libexec = merlin.properties.path() libexec.doc =", "Specify the default implementation \"\"\" # choose the default implementer", "= \"libraries\" libexec = merlin.properties.path() libexec.doc = \"binaries that are", "@classmethod def pyre_default(cls, **kwds): \"\"\" Specify the default implementation \"\"\"", "# choose the default implementer return merlin.components.fhs # end of", "documentation\" etc = merlin.properties.path() etc.doc = \"host specific files\" include", "var.doc = \"runtime files\" # framework hooks @classmethod def pyre_default(cls,", "include = merlin.properties.path() include.doc = \"library header files\" lib =", "of intermediate and final build products class PrefixLayout(merlin.protocol, family=\"merlin.layouts.prefix\"): \"\"\"", "(c) 1998-2021 all rights reserved # support import merlin #", "doc = merlin.properties.path() doc.doc = \"package documentation\" etc = merlin.properties.path()", "that are meant to be used by other packages\" share", "= \"runtime files\" # framework hooks @classmethod def pyre_default(cls, **kwds):", "PrefixLayout(merlin.protocol, family=\"merlin.layouts.prefix\"): \"\"\" The manager of the all build products,", "the all build products, both final and intermediate disposables \"\"\"", "both final and intermediate disposables \"\"\" # required state bin", "\"\"\" Specify the default implementation \"\"\" # choose the default", "import merlin # the manager of intermediate and final build", "= merlin.properties.path() etc.doc = \"host specific files\" include = merlin.properties.path()", "bin = merlin.properties.path() bin.doc = \"the location of executables\" config", "# support import merlin # the manager of intermediate and", "package files\" var = merlin.properties.path() var.doc = \"runtime files\" #", "files\" lib = merlin.properties.path() lib.doc = \"libraries\" libexec = merlin.properties.path()", "package configuration 
files\" doc = merlin.properties.path() doc.doc = \"package documentation\"", "# -*- coding: utf-8 -*- # # <NAME> <<EMAIL>> #", "= \"library header files\" lib = merlin.properties.path() lib.doc = \"libraries\"", "reserved # support import merlin # the manager of intermediate", "\"package documentation\" etc = merlin.properties.path() etc.doc = \"host specific files\"", "= merlin.properties.path() include.doc = \"library header files\" lib = merlin.properties.path()", "files\" include = merlin.properties.path() include.doc = \"library header files\" lib", "all build products, both final and intermediate disposables \"\"\" #", "packages\" share = merlin.properties.path() share.doc = \"architecture independent package files\"", "-*- # # <NAME> <<EMAIL>> # (c) 1998-2021 all rights", "bin.doc = \"the location of executables\" config = merlin.properties.path() config.doc", "libexec.doc = \"binaries that are meant to be used by", "= \"the location of executables\" config = merlin.properties.path() config.doc =", "= merlin.properties.path() doc.doc = \"package documentation\" etc = merlin.properties.path() etc.doc", "the default implementation \"\"\" # choose the default implementer return", "share = merlin.properties.path() share.doc = \"architecture independent package files\" var", "# required state bin = merlin.properties.path() bin.doc = \"the location", "= \"package documentation\" etc = merlin.properties.path() etc.doc = \"host specific", "\"libraries\" libexec = merlin.properties.path() libexec.doc = \"binaries that are meant", "= merlin.properties.path() libexec.doc = \"binaries that are meant to be", "final and intermediate disposables \"\"\" # required state bin =", "and intermediate disposables \"\"\" # required state bin = merlin.properties.path()", "= merlin.properties.path() share.doc = \"architecture independent package files\" var =", "= merlin.properties.path() var.doc = \"runtime files\" # framework hooks @classmethod", "= merlin.properties.path() 
config.doc = \"global package configuration files\" doc =", "<<EMAIL>> # (c) 1998-2021 all rights reserved # support import", "meant to be used by other packages\" share = merlin.properties.path()", "required state bin = merlin.properties.path() bin.doc = \"the location of", "The manager of the all build products, both final and", "= \"host specific files\" include = merlin.properties.path() include.doc = \"library", "utf-8 -*- # # <NAME> <<EMAIL>> # (c) 1998-2021 all", "lib = merlin.properties.path() lib.doc = \"libraries\" libexec = merlin.properties.path() libexec.doc", "by other packages\" share = merlin.properties.path() share.doc = \"architecture independent", "manager of the all build products, both final and intermediate", "\"\"\" # required state bin = merlin.properties.path() bin.doc = \"the", "products, both final and intermediate disposables \"\"\" # required state", "default implementation \"\"\" # choose the default implementer return merlin.components.fhs", "are meant to be used by other packages\" share =", "products class PrefixLayout(merlin.protocol, family=\"merlin.layouts.prefix\"): \"\"\" The manager of the all", "rights reserved # support import merlin # the manager of", "framework hooks @classmethod def pyre_default(cls, **kwds): \"\"\" Specify the default", "var = merlin.properties.path() var.doc = \"runtime files\" # framework hooks", "= merlin.properties.path() bin.doc = \"the location of executables\" config =", "be used by other packages\" share = merlin.properties.path() share.doc =", "share.doc = \"architecture independent package files\" var = merlin.properties.path() var.doc", "family=\"merlin.layouts.prefix\"): \"\"\" The manager of the all build products, both", "\"\"\" # choose the default implementer return merlin.components.fhs # end", "coding: utf-8 -*- # # <NAME> <<EMAIL>> # (c) 1998-2021", "hooks @classmethod def pyre_default(cls, **kwds): \"\"\" Specify the default implementation", "config = merlin.properties.path() 
config.doc = \"global package configuration files\" doc", "libexec = merlin.properties.path() libexec.doc = \"binaries that are meant to", "\"binaries that are meant to be used by other packages\"", "manager of intermediate and final build products class PrefixLayout(merlin.protocol, family=\"merlin.layouts.prefix\"):", "doc.doc = \"package documentation\" etc = merlin.properties.path() etc.doc = \"host", "lib.doc = \"libraries\" libexec = merlin.properties.path() libexec.doc = \"binaries that", "etc = merlin.properties.path() etc.doc = \"host specific files\" include =", "\"architecture independent package files\" var = merlin.properties.path() var.doc = \"runtime", "merlin # the manager of intermediate and final build products", "other packages\" share = merlin.properties.path() share.doc = \"architecture independent package", "# framework hooks @classmethod def pyre_default(cls, **kwds): \"\"\" Specify the", "pyre_default(cls, **kwds): \"\"\" Specify the default implementation \"\"\" # choose", "-*- coding: utf-8 -*- # # <NAME> <<EMAIL>> # (c)", "**kwds): \"\"\" Specify the default implementation \"\"\" # choose the", "executables\" config = merlin.properties.path() config.doc = \"global package configuration files\"", "= \"binaries that are meant to be used by other", "files\" var = merlin.properties.path() var.doc = \"runtime files\" # framework", "support import merlin # the manager of intermediate and final", "merlin.properties.path() config.doc = \"global package configuration files\" doc = merlin.properties.path()", "of executables\" config = merlin.properties.path() config.doc = \"global package configuration", "= \"global package configuration files\" doc = merlin.properties.path() doc.doc =", "merlin.properties.path() var.doc = \"runtime files\" # framework hooks @classmethod def", "\"the location of executables\" config = merlin.properties.path() config.doc = \"global", "merlin.properties.path() lib.doc = \"libraries\" libexec = 
merlin.properties.path() libexec.doc = \"binaries" ]
[ "# promote products derived from this software without specific prior", "BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY,", "met: # # * Redistributions of source code must retain", "to endorse or # promote products derived from this software", "materials provided with the distribution. # # * Neither the", "BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF #", "ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,", "and/or other materials provided with the distribution. # # *", "used to endorse or # promote products derived from this", "OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,", "SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED", "BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR", "OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## import maya.cmds", "# other contributors to this software may be used to", ").read() toMaya = IECoreMaya.ToMayaImageConverter( imageA ) mImage = maya.OpenMaya.MImage() toMaya.convert(", "# IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT", "in the # documentation and/or other materials provided with the", "or without # modification, are permitted provided that the following", "of conditions and the following disclaimer. # # * Redistributions", "# * Redistributions in binary form must reproduce the above", "# # Redistribution and use in source and binary forms,", "Redistribution and use in source and binary forms, with or", "and use in source and binary forms, with or without", "this list of conditions and the following disclaimer. # #", "notice, this list of conditions and the following disclaimer in", "the following disclaimer in the # documentation and/or other materials", "the distribution. 
# # * Neither the name of Image", "WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES", "PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,", "IECoreImage import IECoreMaya class ImageConverterTest( IECoreMaya.TestCase ) : def test(", "fromMaya.convert() self.assertFalse( IECoreImage.ImageDiffOp()( imageA=imageA, imageB=imageB, maxError=1.0/256 ).value ) if __name__", "provided that the following conditions are # met: # #", "endorse or # promote products derived from this software without", "All rights reserved. # # Redistribution and use in source", "with the distribution. # # * Neither the name of", "# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,", "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE", "without # modification, are permitted provided that the following conditions", "retain the above copyright # notice, this list of conditions", "IECore.Reader.create( \"test/IECoreImage/data/exr/colorBarsWithAlpha.exr\" ).read() toMaya = IECoreMaya.ToMayaImageConverter( imageA ) mImage =", "mImage ) fromMaya = IECoreMaya.FromMayaImageConverter( mImage ) imageB = fromMaya.convert()", "# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A", "in binary form must reproduce the above copyright # notice,", "PARTICULAR # PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE", "INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT", "USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE", "OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,", ") : imageA = IECore.Reader.create( \"test/IECoreImage/data/exr/colorBarsWithAlpha.exr\" ).read() toMaya = IECoreMaya.ToMayaImageConverter(", "SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", "# notice, this list of conditions and the following disclaimer", ": imageA = IECore.Reader.create( \"test/IECoreImage/data/exr/colorBarsWithAlpha.exr\" ).read() toMaya = IECoreMaya.ToMayaImageConverter( imageA", "of Image Engine Design nor the names of any #", "DAMAGE. # ########################################################################## import maya.cmds import IECore import IECoreImage import", "are # met: # # * Redistributions of source code", "this list of conditions and the following disclaimer in the", "# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,", "DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND", "<reponame>bradleyhenke/cortex ########################################################################## # # Copyright (c) 2011, Image Engine Design", "rights reserved. # # Redistribution and use in source and", "contributors to this software may be used to endorse or", "conditions are # met: # # * Redistributions of source", "binary form must reproduce the above copyright # notice, this", "EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #", "Neither the name of Image Engine Design nor the names", "promote products derived from this software without specific prior #", "HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER", "documentation and/or other materials provided with the distribution. 
# #", "imageA ) mImage = maya.OpenMaya.MImage() toMaya.convert( mImage ) fromMaya =", "EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, #", "AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN", "modification, are permitted provided that the following conditions are #", "# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY", "LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN", "list of conditions and the following disclaimer in the #", "that the following conditions are # met: # # *", "THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF", "Engine Design Inc. All rights reserved. # # Redistribution and", "Image Engine Design nor the names of any # other", "########################################################################## import maya.cmds import IECore import IECoreImage import IECoreMaya class", "forms, with or without # modification, are permitted provided that", "name of Image Engine Design nor the names of any", "SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR #", "TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR", "binary forms, with or without # modification, are permitted provided", "# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,", "IECoreMaya.TestCase ) : def test( self ) : imageA =", "# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING", "OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT", "reserved. # # Redistribution and use in source and binary", "2011, Image Engine Design Inc. All rights reserved. # #", "copyright # notice, this list of conditions and the following", "LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR", "disclaimer in the # documentation and/or other materials provided with", "# # Copyright (c) 2011, Image Engine Design Inc. 
All", "source and binary forms, with or without # modification, are", "software may be used to endorse or # promote products", "IECoreMaya.FromMayaImageConverter( mImage ) imageB = fromMaya.convert() self.assertFalse( IECoreImage.ImageDiffOp()( imageA=imageA, imageB=imageB,", "the following conditions are # met: # # * Redistributions", "OUT OF THE USE OF THIS # SOFTWARE, EVEN IF", "# Copyright (c) 2011, Image Engine Design Inc. All rights", "BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY", "ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,", "following conditions are # met: # # * Redistributions of", "LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", "EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE", "Design Inc. All rights reserved. # # Redistribution and use", "above copyright # notice, this list of conditions and the", "LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING #", "def test( self ) : imageA = IECore.Reader.create( \"test/IECoreImage/data/exr/colorBarsWithAlpha.exr\" ).read()", "Redistributions of source code must retain the above copyright #", "ImageConverterTest( IECoreMaya.TestCase ) : def test( self ) : imageA", "maya.cmds import IECore import IECoreImage import IECoreMaya class ImageConverterTest( IECoreMaya.TestCase", "########################################################################## # # Copyright (c) 2011, Image Engine Design Inc.", "# met: # # * Redistributions of source code must", "in source and binary forms, with or without # modification,", "IN ANY WAY OUT OF THE USE OF THIS #", "# documentation and/or other materials provided with the distribution. #", "the names of any # other contributors to this software", "OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER", "SUCH DAMAGE. 
# ########################################################################## import maya.cmds import IECore import IECoreImage", "INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY,", "# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS", "AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED", "PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS # IS\"", "ANY WAY OUT OF THE USE OF THIS # SOFTWARE,", "CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF", "USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED", "= fromMaya.convert() self.assertFalse( IECoreImage.ImageDiffOp()( imageA=imageA, imageB=imageB, maxError=1.0/256 ).value ) if", "list of conditions and the following disclaimer. # # *", "the above copyright # notice, this list of conditions and", ") imageB = fromMaya.convert() self.assertFalse( IECoreImage.ImageDiffOp()( imageA=imageA, imageB=imageB, maxError=1.0/256 ).value", "# * Redistributions of source code must retain the above", "self.assertFalse( IECoreImage.ImageDiffOp()( imageA=imageA, imageB=imageB, maxError=1.0/256 ).value ) if __name__ ==", "NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE", "# ########################################################################## import maya.cmds import IECore import IECoreImage import IECoreMaya", "must reproduce the above copyright # notice, this list of", "LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION)", "mImage ) imageB = fromMaya.convert() self.assertFalse( IECoreImage.ImageDiffOp()( imageA=imageA, imageB=imageB, maxError=1.0/256", "IECore import IECoreImage import IECoreMaya class ImageConverterTest( IECoreMaya.TestCase ) :", "ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT", "OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR", "CONTRIBUTORS \"AS # IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES,", "mImage = maya.OpenMaya.MImage() toMaya.convert( mImage ) fromMaya = IECoreMaya.FromMayaImageConverter( mImage", 
"import IECoreImage import IECoreMaya class ImageConverterTest( IECoreMaya.TestCase ) : def", "NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND", "GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS;", "TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF", "of any # other contributors to this software may be", "FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO", "ARISING IN ANY WAY OUT OF THE USE OF THIS", "= IECoreMaya.FromMayaImageConverter( mImage ) imageB = fromMaya.convert() self.assertFalse( IECoreImage.ImageDiffOp()( imageA=imageA,", "# # * Neither the name of Image Engine Design", "and binary forms, with or without # modification, are permitted", "imageB = fromMaya.convert() self.assertFalse( IECoreImage.ImageDiffOp()( imageA=imageA, imageB=imageB, maxError=1.0/256 ).value )", "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE", "distribution. # # * Neither the name of Image Engine", "THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## import maya.cmds import", "OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY", "# written permission. # # THIS SOFTWARE IS PROVIDED BY", "other contributors to this software may be used to endorse", "use in source and binary forms, with or without #", "DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR #", "\"AS # IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,", "form must reproduce the above copyright # notice, this list", "OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON", "any # other contributors to this software may be used", "PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY", ") : def test( self ) : imageA = IECore.Reader.create(", "WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE", "permission. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT", "\"test/IECoreImage/data/exr/colorBarsWithAlpha.exr\" ).read() toMaya = IECoreMaya.ToMayaImageConverter( imageA ) mImage = maya.OpenMaya.MImage()", "# Redistribution and use in source and binary forms, with", "* Neither the name of Image Engine Design nor the", "are permitted provided that the following conditions are # met:", "DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE", "this software may be used to endorse or # promote", "or # promote products derived from this software without specific", "THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR", "Engine Design nor the names of any # other contributors", "ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES", "code must retain the above copyright # notice, this list", "from this software without specific prior # written permission. #", "CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN", "CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE)", "POSSIBILITY OF SUCH DAMAGE. # ########################################################################## import maya.cmds import IECore", "to this software may be used to endorse or #", "the # documentation and/or other materials provided with the distribution.", "COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS # IS\" AND ANY EXPRESS", "THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF", "toMaya.convert( mImage ) fromMaya = IECoreMaya.FromMayaImageConverter( mImage ) imageB =", "Redistributions in binary form must reproduce the above copyright #", "maya.OpenMaya.MImage() toMaya.convert( mImage ) fromMaya = IECoreMaya.FromMayaImageConverter( mImage ) imageB", "NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;", "FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL", "A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL", "written permission. # # THIS SOFTWARE IS PROVIDED BY THE", "OF SUCH DAMAGE. 
# ########################################################################## import maya.cmds import IECore import", "test( self ) : imageA = IECore.Reader.create( \"test/IECoreImage/data/exr/colorBarsWithAlpha.exr\" ).read() toMaya", "notice, this list of conditions and the following disclaimer. #", "* Redistributions of source code must retain the above copyright", "OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY", "permitted provided that the following conditions are # met: #", "WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN", "and the following disclaimer. # # * Redistributions in binary", "other materials provided with the distribution. # # * Neither", "import IECoreMaya class ImageConverterTest( IECoreMaya.TestCase ) : def test( self", "reproduce the above copyright # notice, this list of conditions", "class ImageConverterTest( IECoreMaya.TestCase ) : def test( self ) :", "toMaya = IECoreMaya.ToMayaImageConverter( imageA ) mImage = maya.OpenMaya.MImage() toMaya.convert( mImage", "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR #", "= maya.OpenMaya.MImage() toMaya.convert( mImage ) fromMaya = IECoreMaya.FromMayaImageConverter( mImage )", "Inc. All rights reserved. # # Redistribution and use in", "# modification, are permitted provided that the following conditions are", "the following disclaimer. # # * Redistributions in binary form", "INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT", "IECoreImage.ImageDiffOp()( imageA=imageA, imageB=imageB, maxError=1.0/256 ).value ) if __name__ == \"__main__\":", "source code must retain the above copyright # notice, this", "SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS", "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR", "OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT", "Image Engine Design Inc. All rights reserved. 
# # Redistribution", ") fromMaya = IECoreMaya.FromMayaImageConverter( mImage ) imageB = fromMaya.convert() self.assertFalse(", "IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR", "must retain the above copyright # notice, this list of", "* Redistributions in binary form must reproduce the above copyright", "conditions and the following disclaimer in the # documentation and/or", "derived from this software without specific prior # written permission.", "AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT,", "OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF", "software without specific prior # written permission. # # THIS", "STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING", "PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER", "prior # written permission. # # THIS SOFTWARE IS PROVIDED", "THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS # IS\" AND ANY", "OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR", "CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, #", "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS", "BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS # IS\" AND", "# # * Redistributions of source code must retain the", "# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF", "OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED", "fromMaya = IECoreMaya.FromMayaImageConverter( mImage ) imageB = fromMaya.convert() self.assertFalse( IECoreImage.ImageDiffOp()(", "= IECore.Reader.create( \"test/IECoreImage/data/exr/colorBarsWithAlpha.exr\" ).read() toMaya = IECoreMaya.ToMayaImageConverter( imageA ) mImage", "following disclaimer in the # documentation and/or other materials provided", "OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF", "may be used to endorse or # promote products derived", "with or without # modification, are permitted provided that the", "# notice, this list of conditions and the following disclaimer.", ") mImage = maya.OpenMaya.MImage() toMaya.convert( 
mImage ) fromMaya = IECoreMaya.FromMayaImageConverter(", "names of any # other contributors to this software may", "be used to endorse or # promote products derived from", "(c) 2011, Image Engine Design Inc. All rights reserved. #", "IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ##########################################################################", "# # * Redistributions in binary form must reproduce the", "IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS #", "AND CONTRIBUTORS \"AS # IS\" AND ANY EXPRESS OR IMPLIED", "FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT", "COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT,", "DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,", "(INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT", "disclaimer. # # * Redistributions in binary form must reproduce", "SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS", "NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE", "of source code must retain the above copyright # notice,", "following disclaimer. # # * Redistributions in binary form must", "of conditions and the following disclaimer in the # documentation", "the name of Image Engine Design nor the names of", "OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE", "# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT", "THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY", "OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE", "# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH", "SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR", "import maya.cmds import IECore import IECoreImage import IECoreMaya class ImageConverterTest(", ": def test( self ) : imageA = IECore.Reader.create( \"test/IECoreImage/data/exr/colorBarsWithAlpha.exr\"", "self ) : imageA = IECore.Reader.create( \"test/IECoreImage/data/exr/colorBarsWithAlpha.exr\" ).read() toMaya =", "without specific prior # written permission. 
# # THIS SOFTWARE", "import IECore import IECoreImage import IECoreMaya class ImageConverterTest( IECoreMaya.TestCase )", "this software without specific prior # written permission. # #", "TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY", "imageA = IECore.Reader.create( \"test/IECoreImage/data/exr/colorBarsWithAlpha.exr\" ).read() toMaya = IECoreMaya.ToMayaImageConverter( imageA )", "ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## import", "EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, #", "Copyright (c) 2011, Image Engine Design Inc. All rights reserved.", "(INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS", "HOLDERS AND CONTRIBUTORS \"AS # IS\" AND ANY EXPRESS OR", "products derived from this software without specific prior # written", "LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS", "provided with the distribution. # # * Neither the name", "IECoreMaya.ToMayaImageConverter( imageA ) mImage = maya.OpenMaya.MImage() toMaya.convert( mImage ) fromMaya", "IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT", "IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED", "specific prior # written permission. 
# # THIS SOFTWARE IS", "and the following disclaimer in the # documentation and/or other", "MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED.", "# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND", "IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS", "# * Neither the name of Image Engine Design nor", "Design nor the names of any # other contributors to", "IECoreMaya class ImageConverterTest( IECoreMaya.TestCase ) : def test( self )", "INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF", "imageA=imageA, imageB=imageB, maxError=1.0/256 ).value ) if __name__ == \"__main__\": IECoreMaya.TestProgram()", "nor the names of any # other contributors to this", "conditions and the following disclaimer. # # * Redistributions in", "ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR", "= IECoreMaya.ToMayaImageConverter( imageA ) mImage = maya.OpenMaya.MImage() toMaya.convert( mImage )" ]
[ "def run_checks(): cleanup() trainer = instantiate_multinode_ddp_if_possible() model = setup_model(trainer) check_model_ranks(model)", "torch.utils.data.DataLoader(dataset, batch_size=2) def forward(self, batch): return batch.mean() def validation_step(self, batch,", "NVIDIA CORPORATION. All rights reserved. # # Licensed under the", "= f.readlines() texts = [t.replace(\"\\n\", \"\") for t in texts]", "to trainer.global_rank !\") exit(1) if log_world_size != world_size: print(\"Logged world", "2.0 (the \"License\"); # you may not use this file", "limitations under the License. import os import shutil import torch", "use_datetime_version=False, version=\"\") exp_manager(trainer, cfg=OmegaConf.structured(exp_manager_cfg)) return trainer def setup_model(trainer: Trainer): model", "**kwargs): cfg = OmegaConf.structured({}) super().__init__(cfg, trainer=kwargs.get('trainer', None)) # dummy parameter", "allow DDP to execute self.l1 = torch.nn.modules.Linear(in_features=2, out_features=1) def train_dataloader(self):", "list_available_models(self): pass def setup_training_data(self): pass def setup_validation_data(self): pass def validation_epoch_end(self,", "None def val_dataloader(self): return None def predict_dataloader(self): dataset = OnesDataset(2)", "instantiate_multinode_ddp_if_possible() model = setup_model(trainer) check_model_ranks(model) print(\"DDP checks passed !\") cleanup()", "ExampleModel(ModelPT): def __init__(self, *args, **kwargs): cfg = OmegaConf.structured({}) super().__init__(cfg, trainer=kwargs.get('trainer',", "model def get_rank_info(texts: list, rank_key: str) -> int: for line", "def __getitem__(self, *args): return torch.ones(2) def __len__(self): return self.__dataset_len class", "trainer.global_rank !\") exit(1) if log_world_size != world_size: print(\"Logged world size", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "os.path.join('./ddp_check/', 'default', 'version_0') file_template = 
\"nemo_log_globalrank-{rank}_localrank-{rank}.txt\" world_size = torch.cuda.device_count() for", "out_features=1) def train_dataloader(self): return None def val_dataloader(self): return None def", "parameter in order to allow DDP to execute self.l1 =", "model = ExampleModel(trainer=trainer) logging.info(f\"M.Global Rank:{model.global_rank}\") logging.info(f\"M.Local Rank:{model.local_rank}\") logging.info(f\"M.World Size:{model.trainer.world_size}\") trainer.predict(model)", "from nemo.utils import logging from nemo.utils.exp_manager import ExpManagerConfig, exp_manager class", "setup_validation_data(self): pass def validation_epoch_end(self, loss): self.log(\"val_loss\", torch.stack(loss).mean()) def instantiate_multinode_ddp_if_possible(): num_gpus", "if rank_key in line: rank_value = line.split(\":\")[-1] rank_value = int(rank_value)", "rank: print(\"Logged global rank is not equal to trainer.global_rank !\")", "use this file except in compliance with the License. #", "torch.cuda.device_count() for rank in range(world_size): filename = file_template.format(rank=rank) filepath =", "reserved. # # Licensed under the Apache License, Version 2.0", "torch.cuda.device_count() trainer = Trainer(gpus=num_gpus, accelerator='ddp', logger=None, checkpoint_callback=None) exp_manager_cfg = ExpManagerConfig(exp_dir='./ddp_check/',", "validation_step(self, batch, batch_idx): return self(batch) def training_step(self, batch, batch_idx): return", "for line in texts: if rank_key in line: rank_value =", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "and # limitations under the License. import os import shutil", "key !\") exit(1) @rank_zero_only def check_model_ranks(model: ExampleModel): basedir = os.path.join('./ddp_check/',", "cleanup(): if os.path.exists('./ddp_check'): shutil.rmtree('./ddp_check', ignore_errors=True) def run_checks(): cleanup() trainer =", "License. 
# You may obtain a copy of the License", "\"nemo_log_globalrank-{rank}_localrank-{rank}.txt\" world_size = torch.cuda.device_count() for rank in range(world_size): filename =", "to execute self.l1 = torch.nn.modules.Linear(in_features=2, out_features=1) def train_dataloader(self): return None", "under the License is distributed on an \"AS IS\" BASIS,", "rank_key: str) -> int: for line in texts: if rank_key", "self.log(\"val_loss\", torch.stack(loss).mean()) def instantiate_multinode_ddp_if_possible(): num_gpus = torch.cuda.device_count() trainer = Trainer(gpus=num_gpus,", "License for the specific language governing permissions and # limitations", "torch from omegaconf import OmegaConf from pytorch_lightning import Trainer from", "self(batch) def training_step(self, batch, batch_idx): return self(batch) def list_available_models(self): pass", "def validation_epoch_end(self, loss): self.log(\"val_loss\", torch.stack(loss).mean()) def instantiate_multinode_ddp_if_possible(): num_gpus = torch.cuda.device_count()", "OmegaConf from pytorch_lightning import Trainer from pytorch_lightning.utilities.distributed import rank_zero_only from", "if log_global_rank != rank: print(\"Logged global rank is not equal", "in order to allow DDP to execute self.l1 = torch.nn.modules.Linear(in_features=2,", "execute self.l1 = torch.nn.modules.Linear(in_features=2, out_features=1) def train_dataloader(self): return None def", "= ExampleModel(trainer=trainer) logging.info(f\"M.Global Rank:{model.global_rank}\") logging.info(f\"M.Local Rank:{model.local_rank}\") logging.info(f\"M.World Size:{model.trainer.world_size}\") trainer.predict(model) return", "the License. 
import os import shutil import torch from omegaconf", "setup_training_data(self): pass def setup_validation_data(self): pass def validation_epoch_end(self, loss): self.log(\"val_loss\", torch.stack(loss).mean())", "loss): self.log(\"val_loss\", torch.stack(loss).mean()) def instantiate_multinode_ddp_if_possible(): num_gpus = torch.cuda.device_count() trainer =", "return None def val_dataloader(self): return None def predict_dataloader(self): dataset =", "rank_value print(\"Could not find the correct rank key !\") exit(1)", "Trainer from pytorch_lightning.utilities.distributed import rank_zero_only from nemo.core import ModelPT from", "in compliance with the License. # You may obtain a", "software # distributed under the License is distributed on an", "import OmegaConf from pytorch_lightning import Trainer from pytorch_lightning.utilities.distributed import rank_zero_only", "to allow DDP to execute self.l1 = torch.nn.modules.Linear(in_features=2, out_features=1) def", "= get_rank_info(texts, rank_key='M.Global Rank') log_world_size = get_rank_info(texts, rank_key='M.World Size') if", "not find the correct rank key !\") exit(1) @rank_zero_only def", "correct rank key !\") exit(1) @rank_zero_only def check_model_ranks(model: ExampleModel): basedir", "__init__(self, dataset_len): super().__init__() self.__dataset_len = dataset_len def __getitem__(self, *args): return", "from omegaconf import OmegaConf from pytorch_lightning import Trainer from pytorch_lightning.utilities.distributed", "log_global_rank != rank: print(\"Logged global rank is not equal to", "print(\"Logged global rank is not equal to trainer.global_rank !\") exit(1)", "batch_idx): return self(batch) def training_step(self, batch, batch_idx): return self(batch) def", "int(rank_value) return rank_value print(\"Could not find the correct rank key", "'r') as f: texts = f.readlines() texts = [t.replace(\"\\n\", \"\")", "OF ANY KIND, either express or implied. 
# See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "to in writing, software # distributed under the License is", "= Trainer(gpus=num_gpus, accelerator='ddp', logger=None, checkpoint_callback=None) exp_manager_cfg = ExpManagerConfig(exp_dir='./ddp_check/', use_datetime_version=False, version=\"\")", "# See the License for the specific language governing permissions", "Rank') log_world_size = get_rank_info(texts, rank_key='M.World Size') if log_global_rank != rank:", "pytorch_lightning import Trainer from pytorch_lightning.utilities.distributed import rank_zero_only from nemo.core import", "batch, batch_idx): return self(batch) def training_step(self, batch, batch_idx): return self(batch)", "or agreed to in writing, software # distributed under the", "val_dataloader(self): return None def predict_dataloader(self): dataset = OnesDataset(2) return torch.utils.data.DataLoader(dataset,", "checkpoint_callback=None) exp_manager_cfg = ExpManagerConfig(exp_dir='./ddp_check/', use_datetime_version=False, version=\"\") exp_manager(trainer, cfg=OmegaConf.structured(exp_manager_cfg)) return trainer", "required by applicable law or agreed to in writing, software", "def instantiate_multinode_ddp_if_possible(): num_gpus = torch.cuda.device_count() trainer = Trainer(gpus=num_gpus, accelerator='ddp', logger=None,", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "return torch.ones(2) def __len__(self): return self.__dataset_len class ExampleModel(ModelPT): def __init__(self,", "with the License. 
# You may obtain a copy of", "batch_size=2) def forward(self, batch): return batch.mean() def validation_step(self, batch, batch_idx):", "equal to trainer.world_size !\") exit(1) @rank_zero_only def cleanup(): if os.path.exists('./ddp_check'):", "# dummy parameter in order to allow DDP to execute", "os.path.exists('./ddp_check'): shutil.rmtree('./ddp_check', ignore_errors=True) def run_checks(): cleanup() trainer = instantiate_multinode_ddp_if_possible() model", "compliance with the License. # You may obtain a copy", "agreed to in writing, software # distributed under the License", "distributed under the License is distributed on an \"AS IS\"", "nemo.core import ModelPT from nemo.utils import logging from nemo.utils.exp_manager import", "def cleanup(): if os.path.exists('./ddp_check'): shutil.rmtree('./ddp_check', ignore_errors=True) def run_checks(): cleanup() trainer", "def get_rank_info(texts: list, rank_key: str) -> int: for line in", "from nemo.utils.exp_manager import ExpManagerConfig, exp_manager class OnesDataset(torch.utils.data.Dataset): def __init__(self, dataset_len):", "predict_dataloader(self): dataset = OnesDataset(2) return torch.utils.data.DataLoader(dataset, batch_size=2) def forward(self, batch):", "express or implied. # See the License for the specific", "except in compliance with the License. 
# You may obtain", "as f: texts = f.readlines() texts = [t.replace(\"\\n\", \"\") for", "*args): return torch.ones(2) def __len__(self): return self.__dataset_len class ExampleModel(ModelPT): def", "ModelPT from nemo.utils import logging from nemo.utils.exp_manager import ExpManagerConfig, exp_manager", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "exit(1) @rank_zero_only def cleanup(): if os.path.exists('./ddp_check'): shutil.rmtree('./ddp_check', ignore_errors=True) def run_checks():", "writing, software # distributed under the License is distributed on", "# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved. #", "from nemo.core import ModelPT from nemo.utils import logging from nemo.utils.exp_manager", "you may not use this file except in compliance with", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "from pytorch_lightning.utilities.distributed import rank_zero_only from nemo.core import ModelPT from nemo.utils", "= get_rank_info(texts, rank_key='M.World Size') if log_global_rank != rank: print(\"Logged global", "logging.info(f\"M.Local Rank:{model.local_rank}\") logging.info(f\"M.World Size:{model.trainer.world_size}\") trainer.predict(model) return model def get_rank_info(texts: list,", "train_dataloader(self): return None def val_dataloader(self): return None def predict_dataloader(self): dataset", "texts = f.readlines() texts = [t.replace(\"\\n\", \"\") for t in", "CONDITIONS OF ANY KIND, either express or implied. # See", "License. import os import shutil import torch from omegaconf import", "permissions and # limitations under the License. 
import os import", "texts] log_global_rank = get_rank_info(texts, rank_key='M.Global Rank') log_world_size = get_rank_info(texts, rank_key='M.World", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "order to allow DDP to execute self.l1 = torch.nn.modules.Linear(in_features=2, out_features=1)", "return trainer def setup_model(trainer: Trainer): model = ExampleModel(trainer=trainer) logging.info(f\"M.Global Rank:{model.global_rank}\")", "torch.stack(loss).mean()) def instantiate_multinode_ddp_if_possible(): num_gpus = torch.cuda.device_count() trainer = Trainer(gpus=num_gpus, accelerator='ddp',", "for t in texts] log_global_rank = get_rank_info(texts, rank_key='M.Global Rank') log_world_size", "rank_key='M.World Size') if log_global_rank != rank: print(\"Logged global rank is", "ExpManagerConfig(exp_dir='./ddp_check/', use_datetime_version=False, version=\"\") exp_manager(trainer, cfg=OmegaConf.structured(exp_manager_cfg)) return trainer def setup_model(trainer: Trainer):", "None)) # dummy parameter in order to allow DDP to", "cfg=OmegaConf.structured(exp_manager_cfg)) return trainer def setup_model(trainer: Trainer): model = ExampleModel(trainer=trainer) logging.info(f\"M.Global", "filename = file_template.format(rank=rank) filepath = os.path.join(basedir, filename) with open(filepath, 'r')", "OR CONDITIONS OF ANY KIND, either express or implied. #", "governing permissions and # limitations under the License. 
import os", "Rank:{model.global_rank}\") logging.info(f\"M.Local Rank:{model.local_rank}\") logging.info(f\"M.World Size:{model.trainer.world_size}\") trainer.predict(model) return model def get_rank_info(texts:", "the License is distributed on an \"AS IS\" BASIS, #", "!\") exit(1) if log_world_size != world_size: print(\"Logged world size if", "int: for line in texts: if rank_key in line: rank_value", "ignore_errors=True) def run_checks(): cleanup() trainer = instantiate_multinode_ddp_if_possible() model = setup_model(trainer)", "in texts: if rank_key in line: rank_value = line.split(\":\")[-1] rank_value", "= torch.cuda.device_count() trainer = Trainer(gpus=num_gpus, accelerator='ddp', logger=None, checkpoint_callback=None) exp_manager_cfg =", "class ExampleModel(ModelPT): def __init__(self, *args, **kwargs): cfg = OmegaConf.structured({}) super().__init__(cfg,", "torch.nn.modules.Linear(in_features=2, out_features=1) def train_dataloader(self): return None def val_dataloader(self): return None", "import Trainer from pytorch_lightning.utilities.distributed import rank_zero_only from nemo.core import ModelPT", "os.path.join(basedir, filename) with open(filepath, 'r') as f: texts = f.readlines()", "model = setup_model(trainer) check_model_ranks(model) print(\"DDP checks passed !\") cleanup() if", "from pytorch_lightning import Trainer from pytorch_lightning.utilities.distributed import rank_zero_only from nemo.core", "log_global_rank = get_rank_info(texts, rank_key='M.Global Rank') log_world_size = get_rank_info(texts, rank_key='M.World Size')", "# limitations under the License. 
import os import shutil import", "pytorch_lightning.utilities.distributed import rank_zero_only from nemo.core import ModelPT from nemo.utils import", "get_rank_info(texts: list, rank_key: str) -> int: for line in texts:", "rank in range(world_size): filename = file_template.format(rank=rank) filepath = os.path.join(basedir, filename)", "not equal to trainer.global_rank !\") exit(1) if log_world_size != world_size:", "logger=None, checkpoint_callback=None) exp_manager_cfg = ExpManagerConfig(exp_dir='./ddp_check/', use_datetime_version=False, version=\"\") exp_manager(trainer, cfg=OmegaConf.structured(exp_manager_cfg)) return", "law or agreed to in writing, software # distributed under", "pass def setup_training_data(self): pass def setup_validation_data(self): pass def validation_epoch_end(self, loss):", "accelerator='ddp', logger=None, checkpoint_callback=None) exp_manager_cfg = ExpManagerConfig(exp_dir='./ddp_check/', use_datetime_version=False, version=\"\") exp_manager(trainer, cfg=OmegaConf.structured(exp_manager_cfg))", "OnesDataset(torch.utils.data.Dataset): def __init__(self, dataset_len): super().__init__() self.__dataset_len = dataset_len def __getitem__(self,", "return model def get_rank_info(texts: list, rank_key: str) -> int: for", "@rank_zero_only def cleanup(): if os.path.exists('./ddp_check'): shutil.rmtree('./ddp_check', ignore_errors=True) def run_checks(): cleanup()", "self(batch) def list_available_models(self): pass def setup_training_data(self): pass def setup_validation_data(self): pass", "filename) with open(filepath, 'r') as f: texts = f.readlines() texts", "import logging from nemo.utils.exp_manager import ExpManagerConfig, exp_manager class OnesDataset(torch.utils.data.Dataset): def", "import ModelPT from nemo.utils import logging from nemo.utils.exp_manager import ExpManagerConfig,", "Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved. 
# #", "if not equal to trainer.world_size !\") exit(1) @rank_zero_only def cleanup():", "open(filepath, 'r') as f: texts = f.readlines() texts = [t.replace(\"\\n\",", "t in texts] log_global_rank = get_rank_info(texts, rank_key='M.Global Rank') log_world_size =", "batch): return batch.mean() def validation_step(self, batch, batch_idx): return self(batch) def", "super().__init__() self.__dataset_len = dataset_len def __getitem__(self, *args): return torch.ones(2) def", "may obtain a copy of the License at # #", "trainer.predict(model) return model def get_rank_info(texts: list, rank_key: str) -> int:", "None def predict_dataloader(self): dataset = OnesDataset(2) return torch.utils.data.DataLoader(dataset, batch_size=2) def", "line in texts: if rank_key in line: rank_value = line.split(\":\")[-1]", "is not equal to trainer.global_rank !\") exit(1) if log_world_size !=", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "def training_step(self, batch, batch_idx): return self(batch) def list_available_models(self): pass def", "OnesDataset(2) return torch.utils.data.DataLoader(dataset, batch_size=2) def forward(self, batch): return batch.mean() def", "get_rank_info(texts, rank_key='M.World Size') if log_global_rank != rank: print(\"Logged global rank", "may not use this file except in compliance with the", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "this file except in compliance with the License. 
# You", "setup_model(trainer: Trainer): model = ExampleModel(trainer=trainer) logging.info(f\"M.Global Rank:{model.global_rank}\") logging.info(f\"M.Local Rank:{model.local_rank}\") logging.info(f\"M.World", "import shutil import torch from omegaconf import OmegaConf from pytorch_lightning", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "!\") exit(1) @rank_zero_only def check_model_ranks(model: ExampleModel): basedir = os.path.join('./ddp_check/', 'default',", "!\") exit(1) @rank_zero_only def cleanup(): if os.path.exists('./ddp_check'): shutil.rmtree('./ddp_check', ignore_errors=True) def", "in line: rank_value = line.split(\":\")[-1] rank_value = int(rank_value) return rank_value", "# # Licensed under the Apache License, Version 2.0 (the", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "def val_dataloader(self): return None def predict_dataloader(self): dataset = OnesDataset(2) return", "print(\"Could not find the correct rank key !\") exit(1) @rank_zero_only", "if os.path.exists('./ddp_check'): shutil.rmtree('./ddp_check', ignore_errors=True) def run_checks(): cleanup() trainer = instantiate_multinode_ddp_if_possible()", "2021, NVIDIA CORPORATION. All rights reserved. # # Licensed under", "in texts] log_global_rank = get_rank_info(texts, rank_key='M.Global Rank') log_world_size = get_rank_info(texts,", "trainer = instantiate_multinode_ddp_if_possible() model = setup_model(trainer) check_model_ranks(model) print(\"DDP checks passed", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "rights reserved. 
# # Licensed under the Apache License, Version", "__init__(self, *args, **kwargs): cfg = OmegaConf.structured({}) super().__init__(cfg, trainer=kwargs.get('trainer', None)) #", "ExpManagerConfig, exp_manager class OnesDataset(torch.utils.data.Dataset): def __init__(self, dataset_len): super().__init__() self.__dataset_len =", "rank_value = line.split(\":\")[-1] rank_value = int(rank_value) return rank_value print(\"Could not", "exit(1) if log_world_size != world_size: print(\"Logged world size if not", "language governing permissions and # limitations under the License. import", "def setup_model(trainer: Trainer): model = ExampleModel(trainer=trainer) logging.info(f\"M.Global Rank:{model.global_rank}\") logging.info(f\"M.Local Rank:{model.local_rank}\")", "shutil import torch from omegaconf import OmegaConf from pytorch_lightning import", "= file_template.format(rank=rank) filepath = os.path.join(basedir, filename) with open(filepath, 'r') as", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "rank_key='M.Global Rank') log_world_size = get_rank_info(texts, rank_key='M.World Size') if log_global_rank !=", "world_size = torch.cuda.device_count() for rank in range(world_size): filename = file_template.format(rank=rank)", "str) -> int: for line in texts: if rank_key in", "return torch.utils.data.DataLoader(dataset, batch_size=2) def forward(self, batch): return batch.mean() def validation_step(self,", "in range(world_size): filename = file_template.format(rank=rank) filepath = os.path.join(basedir, filename) with", "or implied. 
# See the License for the specific language", "return self(batch) def list_available_models(self): pass def setup_training_data(self): pass def setup_validation_data(self):", "= [t.replace(\"\\n\", \"\") for t in texts] log_global_rank = get_rank_info(texts,", "num_gpus = torch.cuda.device_count() trainer = Trainer(gpus=num_gpus, accelerator='ddp', logger=None, checkpoint_callback=None) exp_manager_cfg", "batch, batch_idx): return self(batch) def list_available_models(self): pass def setup_training_data(self): pass", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "-> int: for line in texts: if rank_key in line:", "texts: if rank_key in line: rank_value = line.split(\":\")[-1] rank_value =", "list, rank_key: str) -> int: for line in texts: if", "def predict_dataloader(self): dataset = OnesDataset(2) return torch.utils.data.DataLoader(dataset, batch_size=2) def forward(self,", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "def __len__(self): return self.__dataset_len class ExampleModel(ModelPT): def __init__(self, *args, **kwargs):", "= ExpManagerConfig(exp_dir='./ddp_check/', use_datetime_version=False, version=\"\") exp_manager(trainer, cfg=OmegaConf.structured(exp_manager_cfg)) return trainer def setup_model(trainer:", "training_step(self, batch, batch_idx): return self(batch) def list_available_models(self): pass def setup_training_data(self):", "os import shutil import torch from omegaconf import OmegaConf from", "dataset_len): super().__init__() self.__dataset_len = dataset_len def __getitem__(self, *args): return torch.ones(2)", "not equal to trainer.world_size !\") exit(1) @rank_zero_only def cleanup(): if", "logging from nemo.utils.exp_manager import ExpManagerConfig, exp_manager class OnesDataset(torch.utils.data.Dataset): def __init__(self,", "[t.replace(\"\\n\", \"\") for t in texts] log_global_rank = get_rank_info(texts, 
rank_key='M.Global", "return self.__dataset_len class ExampleModel(ModelPT): def __init__(self, *args, **kwargs): cfg =", "= \"nemo_log_globalrank-{rank}_localrank-{rank}.txt\" world_size = torch.cuda.device_count() for rank in range(world_size): filename", "Trainer(gpus=num_gpus, accelerator='ddp', logger=None, checkpoint_callback=None) exp_manager_cfg = ExpManagerConfig(exp_dir='./ddp_check/', use_datetime_version=False, version=\"\") exp_manager(trainer,", "(the \"License\"); # you may not use this file except", "log_world_size != world_size: print(\"Logged world size if not equal to", "rank key !\") exit(1) @rank_zero_only def check_model_ranks(model: ExampleModel): basedir =", "# you may not use this file except in compliance", "= dataset_len def __getitem__(self, *args): return torch.ones(2) def __len__(self): return", "check_model_ranks(model: ExampleModel): basedir = os.path.join('./ddp_check/', 'default', 'version_0') file_template = \"nemo_log_globalrank-{rank}_localrank-{rank}.txt\"", "= os.path.join(basedir, filename) with open(filepath, 'r') as f: texts =", "super().__init__(cfg, trainer=kwargs.get('trainer', None)) # dummy parameter in order to allow", "= int(rank_value) return rank_value print(\"Could not find the correct rank", "!= world_size: print(\"Logged world size if not equal to trainer.world_size", "the correct rank key !\") exit(1) @rank_zero_only def check_model_ranks(model: ExampleModel):", "# # Unless required by applicable law or agreed to", "logging.info(f\"M.World Size:{model.trainer.world_size}\") trainer.predict(model) return model def get_rank_info(texts: list, rank_key: str)", "DDP to execute self.l1 = torch.nn.modules.Linear(in_features=2, out_features=1) def train_dataloader(self): return", "trainer = Trainer(gpus=num_gpus, accelerator='ddp', logger=None, checkpoint_callback=None) exp_manager_cfg = ExpManagerConfig(exp_dir='./ddp_check/', use_datetime_version=False,", "rank_value = int(rank_value) return rank_value print(\"Could 
not find the correct", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "def __init__(self, *args, **kwargs): cfg = OmegaConf.structured({}) super().__init__(cfg, trainer=kwargs.get('trainer', None))", "Version 2.0 (the \"License\"); # you may not use this", "with open(filepath, 'r') as f: texts = f.readlines() texts =", "class OnesDataset(torch.utils.data.Dataset): def __init__(self, dataset_len): super().__init__() self.__dataset_len = dataset_len def", "file_template = \"nemo_log_globalrank-{rank}_localrank-{rank}.txt\" world_size = torch.cuda.device_count() for rank in range(world_size):", "implied. # See the License for the specific language governing", "world_size: print(\"Logged world size if not equal to trainer.world_size !\")", "under the Apache License, Version 2.0 (the \"License\"); # you", "f.readlines() texts = [t.replace(\"\\n\", \"\") for t in texts] log_global_rank", "pass def setup_validation_data(self): pass def validation_epoch_end(self, loss): self.log(\"val_loss\", torch.stack(loss).mean()) def", "return self(batch) def training_step(self, batch, batch_idx): return self(batch) def list_available_models(self):", "def setup_training_data(self): pass def setup_validation_data(self): pass def validation_epoch_end(self, loss): self.log(\"val_loss\",", "Size:{model.trainer.world_size}\") trainer.predict(model) return model def get_rank_info(texts: list, rank_key: str) ->", "torch.ones(2) def __len__(self): return self.__dataset_len class ExampleModel(ModelPT): def __init__(self, *args,", "trainer def setup_model(trainer: Trainer): model = ExampleModel(trainer=trainer) logging.info(f\"M.Global Rank:{model.global_rank}\") logging.info(f\"M.Local", "validation_epoch_end(self, loss): self.log(\"val_loss\", torch.stack(loss).mean()) def instantiate_multinode_ddp_if_possible(): num_gpus = torch.cuda.device_count() trainer", "f: texts = f.readlines() texts = [t.replace(\"\\n\", \"\") for t", "All rights reserved. 
# # Licensed under the Apache License,", "by applicable law or agreed to in writing, software #", "self.__dataset_len class ExampleModel(ModelPT): def __init__(self, *args, **kwargs): cfg = OmegaConf.structured({})", "texts = [t.replace(\"\\n\", \"\") for t in texts] log_global_rank =", "\"\") for t in texts] log_global_rank = get_rank_info(texts, rank_key='M.Global Rank')", "global rank is not equal to trainer.global_rank !\") exit(1) if", "line.split(\":\")[-1] rank_value = int(rank_value) return rank_value print(\"Could not find the", "import os import shutil import torch from omegaconf import OmegaConf", "print(\"Logged world size if not equal to trainer.world_size !\") exit(1)", "dataset_len def __getitem__(self, *args): return torch.ones(2) def __len__(self): return self.__dataset_len", "to trainer.world_size !\") exit(1) @rank_zero_only def cleanup(): if os.path.exists('./ddp_check'): shutil.rmtree('./ddp_check',", "line: rank_value = line.split(\":\")[-1] rank_value = int(rank_value) return rank_value print(\"Could", "check_model_ranks(model) print(\"DDP checks passed !\") cleanup() if __name__ == '__main__':", "print(\"DDP checks passed !\") cleanup() if __name__ == '__main__': run_checks()", "for rank in range(world_size): filename = file_template.format(rank=rank) filepath = os.path.join(basedir,", "cfg = OmegaConf.structured({}) super().__init__(cfg, trainer=kwargs.get('trainer', None)) # dummy parameter in", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "size if not equal to trainer.world_size !\") exit(1) @rank_zero_only def", "file_template.format(rank=rank) filepath = os.path.join(basedir, filename) with open(filepath, 'r') as f:", "Unless required by applicable law or agreed to in writing,", "omegaconf import OmegaConf from pytorch_lightning import Trainer from pytorch_lightning.utilities.distributed import", "return rank_value print(\"Could not find the correct rank key !\")", "the specific language governing permissions and # 
limitations under the", "forward(self, batch): return batch.mean() def validation_step(self, batch, batch_idx): return self(batch)", "cleanup() trainer = instantiate_multinode_ddp_if_possible() model = setup_model(trainer) check_model_ranks(model) print(\"DDP checks", "equal to trainer.global_rank !\") exit(1) if log_world_size != world_size: print(\"Logged", "applicable law or agreed to in writing, software # distributed", "= torch.nn.modules.Linear(in_features=2, out_features=1) def train_dataloader(self): return None def val_dataloader(self): return", "setup_model(trainer) check_model_ranks(model) print(\"DDP checks passed !\") cleanup() if __name__ ==", "nemo.utils.exp_manager import ExpManagerConfig, exp_manager class OnesDataset(torch.utils.data.Dataset): def __init__(self, dataset_len): super().__init__()", "__len__(self): return self.__dataset_len class ExampleModel(ModelPT): def __init__(self, *args, **kwargs): cfg", "in writing, software # distributed under the License is distributed", "def setup_validation_data(self): pass def validation_epoch_end(self, loss): self.log(\"val_loss\", torch.stack(loss).mean()) def instantiate_multinode_ddp_if_possible():", "= OmegaConf.structured({}) super().__init__(cfg, trainer=kwargs.get('trainer', None)) # dummy parameter in order", "'version_0') file_template = \"nemo_log_globalrank-{rank}_localrank-{rank}.txt\" world_size = torch.cuda.device_count() for rank in", "log_world_size = get_rank_info(texts, rank_key='M.World Size') if log_global_rank != rank: print(\"Logged", "= setup_model(trainer) check_model_ranks(model) print(\"DDP checks passed !\") cleanup() if __name__", "= instantiate_multinode_ddp_if_possible() model = setup_model(trainer) check_model_ranks(model) print(\"DDP checks passed !\")", "Trainer): model = ExampleModel(trainer=trainer) logging.info(f\"M.Global Rank:{model.global_rank}\") logging.info(f\"M.Local Rank:{model.local_rank}\") logging.info(f\"M.World Size:{model.trainer.world_size}\")", 
"range(world_size): filename = file_template.format(rank=rank) filepath = os.path.join(basedir, filename) with open(filepath,", "def train_dataloader(self): return None def val_dataloader(self): return None def predict_dataloader(self):", "dummy parameter in order to allow DDP to execute self.l1", "pass def validation_epoch_end(self, loss): self.log(\"val_loss\", torch.stack(loss).mean()) def instantiate_multinode_ddp_if_possible(): num_gpus =", "exp_manager(trainer, cfg=OmegaConf.structured(exp_manager_cfg)) return trainer def setup_model(trainer: Trainer): model = ExampleModel(trainer=trainer)", "import ExpManagerConfig, exp_manager class OnesDataset(torch.utils.data.Dataset): def __init__(self, dataset_len): super().__init__() self.__dataset_len", "exit(1) @rank_zero_only def check_model_ranks(model: ExampleModel): basedir = os.path.join('./ddp_check/', 'default', 'version_0')", "def check_model_ranks(model: ExampleModel): basedir = os.path.join('./ddp_check/', 'default', 'version_0') file_template =", "'default', 'version_0') file_template = \"nemo_log_globalrank-{rank}_localrank-{rank}.txt\" world_size = torch.cuda.device_count() for rank", "world size if not equal to trainer.world_size !\") exit(1) @rank_zero_only", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "batch_idx): return self(batch) def list_available_models(self): pass def setup_training_data(self): pass def", "# You may obtain a copy of the License at", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "dataset = OnesDataset(2) return torch.utils.data.DataLoader(dataset, batch_size=2) def forward(self, batch): return", "CORPORATION. All rights reserved. 
# # Licensed under the Apache", "exp_manager class OnesDataset(torch.utils.data.Dataset): def __init__(self, dataset_len): super().__init__() self.__dataset_len = dataset_len", "trainer=kwargs.get('trainer', None)) # dummy parameter in order to allow DDP", "import rank_zero_only from nemo.core import ModelPT from nemo.utils import logging", "the License for the specific language governing permissions and #", "Apache License, Version 2.0 (the \"License\"); # you may not", "get_rank_info(texts, rank_key='M.Global Rank') log_world_size = get_rank_info(texts, rank_key='M.World Size') if log_global_rank", "either express or implied. # See the License for the", "shutil.rmtree('./ddp_check', ignore_errors=True) def run_checks(): cleanup() trainer = instantiate_multinode_ddp_if_possible() model =", "nemo.utils import logging from nemo.utils.exp_manager import ExpManagerConfig, exp_manager class OnesDataset(torch.utils.data.Dataset):", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "(c) 2021, NVIDIA CORPORATION. All rights reserved. 
# # Licensed", "OmegaConf.structured({}) super().__init__(cfg, trainer=kwargs.get('trainer', None)) # dummy parameter in order to", "= OnesDataset(2) return torch.utils.data.DataLoader(dataset, batch_size=2) def forward(self, batch): return batch.mean()", "rank is not equal to trainer.global_rank !\") exit(1) if log_world_size", "basedir = os.path.join('./ddp_check/', 'default', 'version_0') file_template = \"nemo_log_globalrank-{rank}_localrank-{rank}.txt\" world_size =", "rank_zero_only from nemo.core import ModelPT from nemo.utils import logging from", "Rank:{model.local_rank}\") logging.info(f\"M.World Size:{model.trainer.world_size}\") trainer.predict(model) return model def get_rank_info(texts: list, rank_key:", "return batch.mean() def validation_step(self, batch, batch_idx): return self(batch) def training_step(self,", "import torch from omegaconf import OmegaConf from pytorch_lightning import Trainer", "self.l1 = torch.nn.modules.Linear(in_features=2, out_features=1) def train_dataloader(self): return None def val_dataloader(self):", "batch.mean() def validation_step(self, batch, batch_idx): return self(batch) def training_step(self, batch,", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "ExampleModel(trainer=trainer) logging.info(f\"M.Global Rank:{model.global_rank}\") logging.info(f\"M.Local Rank:{model.local_rank}\") logging.info(f\"M.World Size:{model.trainer.world_size}\") trainer.predict(model) return model", "instantiate_multinode_ddp_if_possible(): num_gpus = torch.cuda.device_count() trainer = Trainer(gpus=num_gpus, accelerator='ddp', logger=None, checkpoint_callback=None)", "= torch.cuda.device_count() for rank in range(world_size): filename = file_template.format(rank=rank) filepath", "*args, **kwargs): cfg = OmegaConf.structured({}) super().__init__(cfg, trainer=kwargs.get('trainer', None)) # dummy", "if log_world_size != world_size: print(\"Logged world size if not equal", "run_checks(): cleanup() trainer = 
instantiate_multinode_ddp_if_possible() model = setup_model(trainer) check_model_ranks(model) print(\"DDP", "under the License. import os import shutil import torch from", "def __init__(self, dataset_len): super().__init__() self.__dataset_len = dataset_len def __getitem__(self, *args):", "def forward(self, batch): return batch.mean() def validation_step(self, batch, batch_idx): return", "Size') if log_global_rank != rank: print(\"Logged global rank is not", "logging.info(f\"M.Global Rank:{model.global_rank}\") logging.info(f\"M.Local Rank:{model.local_rank}\") logging.info(f\"M.World Size:{model.trainer.world_size}\") trainer.predict(model) return model def", "\"License\"); # you may not use this file except in", "ExampleModel): basedir = os.path.join('./ddp_check/', 'default', 'version_0') file_template = \"nemo_log_globalrank-{rank}_localrank-{rank}.txt\" world_size", "filepath = os.path.join(basedir, filename) with open(filepath, 'r') as f: texts", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "version=\"\") exp_manager(trainer, cfg=OmegaConf.structured(exp_manager_cfg)) return trainer def setup_model(trainer: Trainer): model =", "__getitem__(self, *args): return torch.ones(2) def __len__(self): return self.__dataset_len class ExampleModel(ModelPT):", "!= rank: print(\"Logged global rank is not equal to trainer.global_rank", "# distributed under the License is distributed on an \"AS", "# Unless required by applicable law or agreed to in", "self.__dataset_len = dataset_len def __getitem__(self, *args): return torch.ones(2) def __len__(self):", "def validation_step(self, batch, batch_idx): return self(batch) def training_step(self, batch, batch_idx):", "<filename>tests/core_ptl/check_for_ranks.py # Copyright (c) 2021, NVIDIA CORPORATION. 
All rights reserved.", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "trainer.world_size !\") exit(1) @rank_zero_only def cleanup(): if os.path.exists('./ddp_check'): shutil.rmtree('./ddp_check', ignore_errors=True)", "rank_key in line: rank_value = line.split(\":\")[-1] rank_value = int(rank_value) return", "You may obtain a copy of the License at #", "@rank_zero_only def check_model_ranks(model: ExampleModel): basedir = os.path.join('./ddp_check/', 'default', 'version_0') file_template", "= line.split(\":\")[-1] rank_value = int(rank_value) return rank_value print(\"Could not find", "find the correct rank key !\") exit(1) @rank_zero_only def check_model_ranks(model:", "return None def predict_dataloader(self): dataset = OnesDataset(2) return torch.utils.data.DataLoader(dataset, batch_size=2)", "the Apache License, Version 2.0 (the \"License\"); # you may", "def list_available_models(self): pass def setup_training_data(self): pass def setup_validation_data(self): pass def", "= os.path.join('./ddp_check/', 'default', 'version_0') file_template = \"nemo_log_globalrank-{rank}_localrank-{rank}.txt\" world_size = torch.cuda.device_count()", "exp_manager_cfg = ExpManagerConfig(exp_dir='./ddp_check/', use_datetime_version=False, version=\"\") exp_manager(trainer, cfg=OmegaConf.structured(exp_manager_cfg)) return trainer def" ]
[ ":param user_id: The ID of the user that should be", "is a template to create your own discord bot in", "\"\"\" with open(\"blacklist.json\", \"r+\") as file: file_data = json.load(file) file_data[\"ids\"].append(user_id)", "None: \"\"\" This function will remove a user based on", "\"\"\" with open(\"blacklist.json\", \"r\") as file: file_data = json.load(file) file_data[\"ids\"].remove(user_id)", "add_user_to_blacklist(user_id: int) -> None: \"\"\" This function will add a", "(https://krypton.ninja) Description: This is a template to create your own", "from the blacklist.json file. :param user_id: The ID of the", "blacklist.json file. \"\"\" with open(\"blacklist.json\", \"r\") as file: file_data =", "This is a template to create your own discord bot", "-> None: \"\"\" This function will add a user based", "add a user based on its ID in the blacklist.json", "\"r\") as file: file_data = json.load(file) file_data[\"ids\"].remove(user_id) with open(\"blacklist.json\", \"w\")", "open(\"blacklist.json\", \"r+\") as file: file_data = json.load(file) file_data[\"ids\"].append(user_id) with open(\"blacklist.json\",", "be removed from the blacklist.json file. \"\"\" with open(\"blacklist.json\", \"r\")", "open(\"blacklist.json\", \"w\") as file: file.seek(0) json.dump(file_data, file, indent=4) def remove_user_from_blacklist(user_id:", "user_id: The ID of the user that should be added", "should be added into the blacklist.json file. \"\"\" with open(\"blacklist.json\",", "the user that should be removed from the blacklist.json file.", "\"\"\" import json def add_user_to_blacklist(user_id: int) -> None: \"\"\" This", "import json def add_user_to_blacklist(user_id: int) -> None: \"\"\" This function", "https://github.com/kkrypt0nn (https://krypton.ninja) Description: This is a template to create your", "should be removed from the blacklist.json file. \"\"\" with open(\"blacklist.json\",", "own discord bot in python. 
Version: 4.1 \"\"\" import json", "file_data[\"ids\"].append(user_id) with open(\"blacklist.json\", \"w\") as file: file.seek(0) json.dump(file_data, file, indent=4)", "Description: This is a template to create your own discord", "on its ID in the blacklist.json file. :param user_id: The", "with open(\"blacklist.json\", \"w\") as file: file.seek(0) json.dump(file_data, file, indent=4) def", "discord bot in python. Version: 4.1 \"\"\" import json def", "ID of the user that should be added into the", "blacklist.json file. \"\"\" with open(\"blacklist.json\", \"r+\") as file: file_data =", "to create your own discord bot in python. Version: 4.1", "json def add_user_to_blacklist(user_id: int) -> None: \"\"\" This function will", "- https://github.com/kkrypt0nn (https://krypton.ninja) Description: This is a template to create", "based on its ID in the blacklist.json file. :param user_id:", "file: file.seek(0) json.dump(file_data, file, indent=4) def remove_user_from_blacklist(user_id: int) -> None:", "the blacklist.json file. :param user_id: The ID of the user", "ID from the blacklist.json file. :param user_id: The ID of", "file. :param user_id: The ID of the user that should", "file: file_data = json.load(file) file_data[\"ids\"].append(user_id) with open(\"blacklist.json\", \"w\") as file:", "removed from the blacklist.json file. \"\"\" with open(\"blacklist.json\", \"r\") as", "that should be added into the blacklist.json file. \"\"\" with", "json.dump(file_data, file, indent=4) def remove_user_from_blacklist(user_id: int) -> None: \"\"\" This", "the blacklist.json file. 
\"\"\" with open(\"blacklist.json\", \"r\") as file: file_data", "json.load(file) file_data[\"ids\"].remove(user_id) with open(\"blacklist.json\", \"w\") as file: file.seek(0) json.dump(file_data, file,", "-> None: \"\"\" This function will remove a user based", "The ID of the user that should be removed from", "Copyright © Krypton 2022 - https://github.com/kkrypt0nn (https://krypton.ninja) Description: This is", "= json.load(file) file_data[\"ids\"].append(user_id) with open(\"blacklist.json\", \"w\") as file: file.seek(0) json.dump(file_data,", "that should be removed from the blacklist.json file. \"\"\" with", "function will add a user based on its ID in", "user that should be removed from the blacklist.json file. \"\"\"", "file.seek(0) json.dump(file_data, file, indent=4) def remove_user_from_blacklist(user_id: int) -> None: \"\"\"", "remove_user_from_blacklist(user_id: int) -> None: \"\"\" This function will remove a", "int) -> None: \"\"\" This function will remove a user", "as file: file_data = json.load(file) file_data[\"ids\"].remove(user_id) with open(\"blacklist.json\", \"w\") as", "file_data = json.load(file) file_data[\"ids\"].append(user_id) with open(\"blacklist.json\", \"w\") as file: file.seek(0)", "user based on its ID in the blacklist.json file. 
:param", "a user based on its ID in the blacklist.json file.", "\"\"\" This function will remove a user based on its", "will remove a user based on its ID from the", "file, indent=4) def remove_user_from_blacklist(user_id: int) -> None: \"\"\" This function", "open(\"blacklist.json\", \"r\") as file: file_data = json.load(file) file_data[\"ids\"].remove(user_id) with open(\"blacklist.json\",", "indent=4) def remove_user_from_blacklist(user_id: int) -> None: \"\"\" This function will", "a template to create your own discord bot in python.", "\"w\") as file: file.seek(0) json.dump(file_data, file, indent=4) def remove_user_from_blacklist(user_id: int)", "int) -> None: \"\"\" This function will add a user", "as file: file.seek(0) json.dump(file_data, file, indent=4) def remove_user_from_blacklist(user_id: int) ->", "create your own discord bot in python. Version: 4.1 \"\"\"", "This function will remove a user based on its ID", "a user based on its ID from the blacklist.json file.", "user based on its ID from the blacklist.json file. :param", "based on its ID from the blacklist.json file. :param user_id:", "ID of the user that should be removed from the", "\"\"\" This function will add a user based on its", "Version: 4.1 \"\"\" import json def add_user_to_blacklist(user_id: int) -> None:", "4.1 \"\"\" import json def add_user_to_blacklist(user_id: int) -> None: \"\"\"", "its ID in the blacklist.json file. :param user_id: The ID", "of the user that should be added into the blacklist.json", "This function will add a user based on its ID", "file: file_data = json.load(file) file_data[\"ids\"].remove(user_id) with open(\"blacklist.json\", \"w\") as file:", "in the blacklist.json file. :param user_id: The ID of the", "bot in python. Version: 4.1 \"\"\" import json def add_user_to_blacklist(user_id:", "Krypton 2022 - https://github.com/kkrypt0nn (https://krypton.ninja) Description: This is a template", "into the blacklist.json file. 
\"\"\" with open(\"blacklist.json\", \"r+\") as file:", "\"\"\"\" Copyright © Krypton 2022 - https://github.com/kkrypt0nn (https://krypton.ninja) Description: This", "added into the blacklist.json file. \"\"\" with open(\"blacklist.json\", \"r+\") as", "remove a user based on its ID from the blacklist.json", "will add a user based on its ID in the", "2022 - https://github.com/kkrypt0nn (https://krypton.ninja) Description: This is a template to", "as file: file_data = json.load(file) file_data[\"ids\"].append(user_id) with open(\"blacklist.json\", \"w\") as", "© Krypton 2022 - https://github.com/kkrypt0nn (https://krypton.ninja) Description: This is a", "file. \"\"\" with open(\"blacklist.json\", \"r\") as file: file_data = json.load(file)", "in python. Version: 4.1 \"\"\" import json def add_user_to_blacklist(user_id: int)", "file_data[\"ids\"].remove(user_id) with open(\"blacklist.json\", \"w\") as file: file.seek(0) json.dump(file_data, file, indent=4)", "your own discord bot in python. Version: 4.1 \"\"\" import", "file. \"\"\" with open(\"blacklist.json\", \"r+\") as file: file_data = json.load(file)", "<gh_stars>0 \"\"\"\" Copyright © Krypton 2022 - https://github.com/kkrypt0nn (https://krypton.ninja) Description:", "None: \"\"\" This function will add a user based on", "with open(\"blacklist.json\", \"r\") as file: file_data = json.load(file) file_data[\"ids\"].remove(user_id) with", "on its ID from the blacklist.json file. :param user_id: The", "with open(\"blacklist.json\", \"r+\") as file: file_data = json.load(file) file_data[\"ids\"].append(user_id) with", "user_id: The ID of the user that should be removed", "the user that should be added into the blacklist.json file.", "= json.load(file) file_data[\"ids\"].remove(user_id) with open(\"blacklist.json\", \"w\") as file: file.seek(0) json.dump(file_data,", "the blacklist.json file. 
\"\"\" with open(\"blacklist.json\", \"r+\") as file: file_data", "function will remove a user based on its ID from", "be added into the blacklist.json file. \"\"\" with open(\"blacklist.json\", \"r+\")", "ID in the blacklist.json file. :param user_id: The ID of", "its ID from the blacklist.json file. :param user_id: The ID", "def remove_user_from_blacklist(user_id: int) -> None: \"\"\" This function will remove", "python. Version: 4.1 \"\"\" import json def add_user_to_blacklist(user_id: int) ->", "file_data = json.load(file) file_data[\"ids\"].remove(user_id) with open(\"blacklist.json\", \"w\") as file: file.seek(0)", "user that should be added into the blacklist.json file. \"\"\"", "of the user that should be removed from the blacklist.json", "blacklist.json file. :param user_id: The ID of the user that", "\"r+\") as file: file_data = json.load(file) file_data[\"ids\"].append(user_id) with open(\"blacklist.json\", \"w\")", "json.load(file) file_data[\"ids\"].append(user_id) with open(\"blacklist.json\", \"w\") as file: file.seek(0) json.dump(file_data, file,", "template to create your own discord bot in python. Version:", "from the blacklist.json file. \"\"\" with open(\"blacklist.json\", \"r\") as file:", "def add_user_to_blacklist(user_id: int) -> None: \"\"\" This function will add", "The ID of the user that should be added into" ]
[ "common.expand_string(pattern, text)) def test_working_0002(self): '''Test that correct input for expand_string", "fails to prevent a bad match from occurring.''' text =", "import common class ParseTestCase(unittest.TestCase): '''Test generic parsing-related functions.''' def test_working_0001(self):", "for expand_string works as expected.''' shot = 'NAME_010' format_string =", "= '/jobs/some_job_here/some_kind/of/real_folders' expected_output = {'JOB': 'some_job_here', 'THING': 'of'} self.assertEqual(expected_output, common.expand_string(pattern,", "python # -*- coding: utf-8 -*- '''Make sure that generic", "'''Force expand_string fails to prevent a bad match from occurring.'''", "from occurring.''' text = '/jobs/some_job/some_kind/of/real_folders' pattern = '/jobs/{JOB}/some_kind/of/real_folders/inner' self.assertFalse(common.expand_string(pattern, text))", "coding: utf-8 -*- '''Make sure that generic functions work exactly", "test_working_0001(self): '''Test that correct input for expand_string works as expected.'''", "pattern = '/jobs/{JOB}/some_kind/of/real_folders/inner' self.assertFalse(common.expand_string(pattern, text)) def test_expand_string_failure_0002(self): '''Force expand_string fails", "to prevent a bad match from occurring.''' text = '/jobs/some_job/some_kind/of/real_folders'", "expected_output = {'JOB': 'some_job_here', 'THING': 'of'} self.assertEqual(expected_output, common.expand_string(pattern, text)) def", "LIBRARIES from ways import common class ParseTestCase(unittest.TestCase): '''Test generic parsing-related", "-*- coding: utf-8 -*- '''Make sure that generic functions work", "functions.''' def test_working_0001(self): '''Test that correct input for expand_string works", "= '/jobs/some_job/some_kind/of/real_folders' pattern = '/jobs/{JOB}/some_kind/of/real_folders/inner' self.assertFalse(common.expand_string(pattern, text)) def test_expand_string_failure_0002(self): '''Force", "from occurring.''' text = 
'/jobs/some_job/some_kind/of/real_folders' pattern = '/jobs/{JOB}/some_kind/{SHOTNAME}/real_folders/inner' self.assertFalse(common.expand_string(pattern, text))", "# -*- coding: utf-8 -*- '''Make sure that generic functions", "'ID': '010'} self.assertEqual(expected_output, common.expand_string(format_string, shot)) def test_expand_string_failure_0001(self): '''Force expand_string fails", "a bad match from occurring.''' text = '/jobs/some_job/some_kind/of/real_folders' pattern =", "'''Make sure that generic functions work exactly as we expect.'''", "test_expand_string_failure_0001(self): '''Force expand_string fails to prevent a bad match from", "input for expand_string works as expected.''' pattern = '/jobs/{JOB}/some_kind/{THING}/real_folders' text", "= {'JOB': 'some_job_here', 'THING': 'of'} self.assertEqual(expected_output, common.expand_string(pattern, text)) def test_working_0002(self):", "that correct input for expand_string works as expected.''' shot =", "'NAME', 'ID': '010'} self.assertEqual(expected_output, common.expand_string(format_string, shot)) def test_expand_string_failure_0001(self): '''Force expand_string", "expand_string fails to prevent a bad match from occurring.''' text", "'/jobs/some_job_here/some_kind/of/real_folders' expected_output = {'JOB': 'some_job_here', 'THING': 'of'} self.assertEqual(expected_output, common.expand_string(pattern, text))", "'some_job_here', 'THING': 'of'} self.assertEqual(expected_output, common.expand_string(pattern, text)) def test_working_0002(self): '''Test that", "prevent a bad match from occurring.''' text = '/jobs/some_job/some_kind/of/real_folders' pattern", "exactly as we expect.''' # IMPORT STANDARD LIBRARIES import unittest", "common class ParseTestCase(unittest.TestCase): '''Test generic parsing-related functions.''' def test_working_0001(self): '''Test", "test_working_0002(self): '''Test that correct input for expand_string works as expected.'''", "IMPORT WAYS LIBRARIES from ways import common class 
ParseTestCase(unittest.TestCase): '''Test", "= '/jobs/{JOB}/some_kind/{THING}/real_folders' text = '/jobs/some_job_here/some_kind/of/real_folders' expected_output = {'JOB': 'some_job_here', 'THING':", "LIBRARIES import unittest # IMPORT WAYS LIBRARIES from ways import", "'''Test that correct input for expand_string works as expected.''' shot", "= '/jobs/{JOB}/some_kind/of/real_folders/inner' self.assertFalse(common.expand_string(pattern, text)) def test_expand_string_failure_0002(self): '''Force expand_string fails to", "# IMPORT WAYS LIBRARIES from ways import common class ParseTestCase(unittest.TestCase):", "unittest # IMPORT WAYS LIBRARIES from ways import common class", "ParseTestCase(unittest.TestCase): '''Test generic parsing-related functions.''' def test_working_0001(self): '''Test that correct", "expected.''' pattern = '/jobs/{JOB}/some_kind/{THING}/real_folders' text = '/jobs/some_job_here/some_kind/of/real_folders' expected_output = {'JOB':", "generic parsing-related functions.''' def test_working_0001(self): '''Test that correct input for", "self.assertEqual(expected_output, common.expand_string(pattern, text)) def test_working_0002(self): '''Test that correct input for", "match from occurring.''' text = '/jobs/some_job/some_kind/of/real_folders' pattern = '/jobs/{JOB}/some_kind/of/real_folders/inner' self.assertFalse(common.expand_string(pattern,", "for expand_string works as expected.''' pattern = '/jobs/{JOB}/some_kind/{THING}/real_folders' text =", "works as expected.''' shot = 'NAME_010' format_string = '{SHOT}_{ID}' expected_output", "work exactly as we expect.''' # IMPORT STANDARD LIBRARIES import", "expand_string works as expected.''' pattern = '/jobs/{JOB}/some_kind/{THING}/real_folders' text = '/jobs/some_job_here/some_kind/of/real_folders'", "common.expand_string(format_string, shot)) def test_expand_string_failure_0001(self): '''Force expand_string fails to prevent a", "shot)) def test_expand_string_failure_0001(self): '''Force expand_string 
fails to prevent a bad", "that correct input for expand_string works as expected.''' pattern =", "as expected.''' pattern = '/jobs/{JOB}/some_kind/{THING}/real_folders' text = '/jobs/some_job_here/some_kind/of/real_folders' expected_output =", "'''Test generic parsing-related functions.''' def test_working_0001(self): '''Test that correct input", "= 'NAME_010' format_string = '{SHOT}_{ID}' expected_output = {'SHOT': 'NAME', 'ID':", "as we expect.''' # IMPORT STANDARD LIBRARIES import unittest #", "STANDARD LIBRARIES import unittest # IMPORT WAYS LIBRARIES from ways", "'NAME_010' format_string = '{SHOT}_{ID}' expected_output = {'SHOT': 'NAME', 'ID': '010'}", "expect.''' # IMPORT STANDARD LIBRARIES import unittest # IMPORT WAYS", "def test_working_0002(self): '''Test that correct input for expand_string works as", "from ways import common class ParseTestCase(unittest.TestCase): '''Test generic parsing-related functions.'''", "bad match from occurring.''' text = '/jobs/some_job/some_kind/of/real_folders' pattern = '/jobs/{JOB}/some_kind/{SHOTNAME}/real_folders/inner'", "bad match from occurring.''' text = '/jobs/some_job/some_kind/of/real_folders' pattern = '/jobs/{JOB}/some_kind/of/real_folders/inner'", "ways import common class ParseTestCase(unittest.TestCase): '''Test generic parsing-related functions.''' def", "occurring.''' text = '/jobs/some_job/some_kind/of/real_folders' pattern = '/jobs/{JOB}/some_kind/of/real_folders/inner' self.assertFalse(common.expand_string(pattern, text)) def", "shot = 'NAME_010' format_string = '{SHOT}_{ID}' expected_output = {'SHOT': 'NAME',", "generic functions work exactly as we expect.''' # IMPORT STANDARD", "correct input for expand_string works as expected.''' shot = 'NAME_010'", "'of'} self.assertEqual(expected_output, common.expand_string(pattern, text)) def test_working_0002(self): '''Test that correct input", "self.assertFalse(common.expand_string(pattern, text)) def test_expand_string_failure_0002(self): '''Force 
expand_string fails to prevent a", "'/jobs/{JOB}/some_kind/{THING}/real_folders' text = '/jobs/some_job_here/some_kind/of/real_folders' expected_output = {'JOB': 'some_job_here', 'THING': 'of'}", "#!/usr/bin/env python # -*- coding: utf-8 -*- '''Make sure that", "'010'} self.assertEqual(expected_output, common.expand_string(format_string, shot)) def test_expand_string_failure_0001(self): '''Force expand_string fails to", "expected.''' shot = 'NAME_010' format_string = '{SHOT}_{ID}' expected_output = {'SHOT':", "expected_output = {'SHOT': 'NAME', 'ID': '010'} self.assertEqual(expected_output, common.expand_string(format_string, shot)) def", "'THING': 'of'} self.assertEqual(expected_output, common.expand_string(pattern, text)) def test_working_0002(self): '''Test that correct", "expand_string works as expected.''' shot = 'NAME_010' format_string = '{SHOT}_{ID}'", "{'SHOT': 'NAME', 'ID': '010'} self.assertEqual(expected_output, common.expand_string(format_string, shot)) def test_expand_string_failure_0001(self): '''Force", "def test_expand_string_failure_0001(self): '''Force expand_string fails to prevent a bad match", "text = '/jobs/some_job/some_kind/of/real_folders' pattern = '/jobs/{JOB}/some_kind/of/real_folders/inner' self.assertFalse(common.expand_string(pattern, text)) def test_expand_string_failure_0002(self):", "that generic functions work exactly as we expect.''' # IMPORT", "-*- '''Make sure that generic functions work exactly as we", "class ParseTestCase(unittest.TestCase): '''Test generic parsing-related functions.''' def test_working_0001(self): '''Test that", "'''Test that correct input for expand_string works as expected.''' pattern", "import unittest # IMPORT WAYS LIBRARIES from ways import common", "correct input for expand_string works as expected.''' pattern = '/jobs/{JOB}/some_kind/{THING}/real_folders'", "'{SHOT}_{ID}' expected_output = {'SHOT': 'NAME', 'ID': '010'} self.assertEqual(expected_output, common.expand_string(format_string, shot))", 
"WAYS LIBRARIES from ways import common class ParseTestCase(unittest.TestCase): '''Test generic", "we expect.''' # IMPORT STANDARD LIBRARIES import unittest # IMPORT", "pattern = '/jobs/{JOB}/some_kind/{THING}/real_folders' text = '/jobs/some_job_here/some_kind/of/real_folders' expected_output = {'JOB': 'some_job_here',", "def test_working_0001(self): '''Test that correct input for expand_string works as", "text = '/jobs/some_job_here/some_kind/of/real_folders' expected_output = {'JOB': 'some_job_here', 'THING': 'of'} self.assertEqual(expected_output,", "{'JOB': 'some_job_here', 'THING': 'of'} self.assertEqual(expected_output, common.expand_string(pattern, text)) def test_working_0002(self): '''Test", "'/jobs/some_job/some_kind/of/real_folders' pattern = '/jobs/{JOB}/some_kind/of/real_folders/inner' self.assertFalse(common.expand_string(pattern, text)) def test_expand_string_failure_0002(self): '''Force expand_string", "as expected.''' shot = 'NAME_010' format_string = '{SHOT}_{ID}' expected_output =", "parsing-related functions.''' def test_working_0001(self): '''Test that correct input for expand_string", "works as expected.''' pattern = '/jobs/{JOB}/some_kind/{THING}/real_folders' text = '/jobs/some_job_here/some_kind/of/real_folders' expected_output", "sure that generic functions work exactly as we expect.''' #", "format_string = '{SHOT}_{ID}' expected_output = {'SHOT': 'NAME', 'ID': '010'} self.assertEqual(expected_output,", "text)) def test_expand_string_failure_0002(self): '''Force expand_string fails to prevent a bad", "= '{SHOT}_{ID}' expected_output = {'SHOT': 'NAME', 'ID': '010'} self.assertEqual(expected_output, common.expand_string(format_string,", "match from occurring.''' text = '/jobs/some_job/some_kind/of/real_folders' pattern = '/jobs/{JOB}/some_kind/{SHOTNAME}/real_folders/inner' self.assertFalse(common.expand_string(pattern,", "def test_expand_string_failure_0002(self): '''Force expand_string fails to prevent a bad match", "functions work 
exactly as we expect.''' # IMPORT STANDARD LIBRARIES", "# IMPORT STANDARD LIBRARIES import unittest # IMPORT WAYS LIBRARIES", "self.assertEqual(expected_output, common.expand_string(format_string, shot)) def test_expand_string_failure_0001(self): '''Force expand_string fails to prevent", "input for expand_string works as expected.''' shot = 'NAME_010' format_string", "test_expand_string_failure_0002(self): '''Force expand_string fails to prevent a bad match from", "text)) def test_working_0002(self): '''Test that correct input for expand_string works", "= {'SHOT': 'NAME', 'ID': '010'} self.assertEqual(expected_output, common.expand_string(format_string, shot)) def test_expand_string_failure_0001(self):", "'/jobs/{JOB}/some_kind/of/real_folders/inner' self.assertFalse(common.expand_string(pattern, text)) def test_expand_string_failure_0002(self): '''Force expand_string fails to prevent", "utf-8 -*- '''Make sure that generic functions work exactly as", "IMPORT STANDARD LIBRARIES import unittest # IMPORT WAYS LIBRARIES from" ]
[ "'Programming Language :: Python :: 3', 'Programming Language :: Python", "MIT License', 'Programming Language :: Python :: 3', 'Programming Language", "License', 'Programming Language :: Python :: 3', 'Programming Language ::", "Language :: Python :: 3.3', 'Programming Language :: Python ::", "setup( name='natasha', version='0.2.0', description='Named-entity recognition for russian language', url='https://github.com/bureaucratic-labs/natasha', author='<NAME>',", "name='natasha', version='0.2.0', description='Named-entity recognition for russian language', url='https://github.com/bureaucratic-labs/natasha', author='<NAME>', author_email='<EMAIL>',", "from setuptools import setup, find_packages setup( name='natasha', version='0.2.0', description='Named-entity recognition", "description='Named-entity recognition for russian language', url='https://github.com/bureaucratic-labs/natasha', author='<NAME>', author_email='<EMAIL>', license='MIT', classifiers=[", "author='<NAME>', author_email='<EMAIL>', license='MIT', classifiers=[ 'Development Status :: 3 - Alpha',", ":: MIT License', 'Programming Language :: Python :: 3', 'Programming", "Developers', 'License :: OSI Approved :: MIT License', 'Programming Language", "language processing, russian morphology, named entity recognition, tomita', packages=find_packages(), install_requires=[", "'Intended Audience :: Developers', 'License :: OSI Approved :: MIT", "url='https://github.com/bureaucratic-labs/natasha', author='<NAME>', author_email='<EMAIL>', license='MIT', classifiers=[ 'Development Status :: 3 -", "tomita', packages=find_packages(), install_requires=[ 'yargy==0.3.0' ], extras_require={ 'web': [ 'ujson', 'aiohttp',", ":: Python :: 3.3', 'Programming Language :: Python :: 3.4',", ":: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language", ":: Developers', 'License :: OSI Approved :: MIT License', 'Programming", ":: 3', 'Programming Language :: Python :: 3.3', 'Programming Language", "russian 
morphology, named entity recognition, tomita', packages=find_packages(), install_requires=[ 'yargy==0.3.0' ],", "Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming", "keywords='natural language processing, russian morphology, named entity recognition, tomita', packages=find_packages(),", "'yargy==0.3.0' ], extras_require={ 'web': [ 'ujson', 'aiohttp', ], }, )", "Language :: Python :: 3.4', 'Programming Language :: Python ::", "Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming", "license='MIT', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience", ":: 3.4', 'Programming Language :: Python :: 3.5', ], keywords='natural", "'License :: OSI Approved :: MIT License', 'Programming Language ::", "OSI Approved :: MIT License', 'Programming Language :: Python ::", "'Programming Language :: Python :: 3.4', 'Programming Language :: Python", "Language :: Python :: 3.5', ], keywords='natural language processing, russian", "author_email='<EMAIL>', license='MIT', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended", "- Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved", "Language :: Python :: 3', 'Programming Language :: Python ::", "Python :: 3.4', 'Programming Language :: Python :: 3.5', ],", "3.3', 'Programming Language :: Python :: 3.4', 'Programming Language ::", "Audience :: Developers', 'License :: OSI Approved :: MIT License',", "3.4', 'Programming Language :: Python :: 3.5', ], keywords='natural language", "recognition, tomita', packages=find_packages(), install_requires=[ 'yargy==0.3.0' ], extras_require={ 'web': [ 'ujson',", "install_requires=[ 'yargy==0.3.0' ], extras_require={ 'web': [ 'ujson', 'aiohttp', ], },", "3.5', ], keywords='natural language processing, russian morphology, named entity recognition,", "setup, find_packages setup( name='natasha', version='0.2.0', description='Named-entity recognition for russian language',", "setuptools import setup, find_packages setup( 
name='natasha', version='0.2.0', description='Named-entity recognition for", "<reponame>glibin/natasha<gh_stars>1-10 from setuptools import setup, find_packages setup( name='natasha', version='0.2.0', description='Named-entity", "Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License", "'Development Status :: 3 - Alpha', 'Intended Audience :: Developers',", "3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI", ":: OSI Approved :: MIT License', 'Programming Language :: Python", "Approved :: MIT License', 'Programming Language :: Python :: 3',", "version='0.2.0', description='Named-entity recognition for russian language', url='https://github.com/bureaucratic-labs/natasha', author='<NAME>', author_email='<EMAIL>', license='MIT',", ":: Python :: 3.4', 'Programming Language :: Python :: 3.5',", "'Programming Language :: Python :: 3.5', ], keywords='natural language processing,", ":: Python :: 3', 'Programming Language :: Python :: 3.3',", ":: Python :: 3.5', ], keywords='natural language processing, russian morphology,", "3', 'Programming Language :: Python :: 3.3', 'Programming Language ::", "], keywords='natural language processing, russian morphology, named entity recognition, tomita',", "processing, russian morphology, named entity recognition, tomita', packages=find_packages(), install_requires=[ 'yargy==0.3.0'", "Python :: 3.5', ], keywords='natural language processing, russian morphology, named", "named entity recognition, tomita', packages=find_packages(), install_requires=[ 'yargy==0.3.0' ], extras_require={ 'web':", "recognition for russian language', url='https://github.com/bureaucratic-labs/natasha', author='<NAME>', author_email='<EMAIL>', license='MIT', classifiers=[ 'Development", "language', url='https://github.com/bureaucratic-labs/natasha', author='<NAME>', author_email='<EMAIL>', license='MIT', classifiers=[ 'Development Status :: 3", "morphology, named entity recognition, tomita', packages=find_packages(), install_requires=[ 
'yargy==0.3.0' ], extras_require={", "find_packages setup( name='natasha', version='0.2.0', description='Named-entity recognition for russian language', url='https://github.com/bureaucratic-labs/natasha',", "russian language', url='https://github.com/bureaucratic-labs/natasha', author='<NAME>', author_email='<EMAIL>', license='MIT', classifiers=[ 'Development Status ::", ":: 3 - Alpha', 'Intended Audience :: Developers', 'License ::", "'Programming Language :: Python :: 3.3', 'Programming Language :: Python", "entity recognition, tomita', packages=find_packages(), install_requires=[ 'yargy==0.3.0' ], extras_require={ 'web': [", "for russian language', url='https://github.com/bureaucratic-labs/natasha', author='<NAME>', author_email='<EMAIL>', license='MIT', classifiers=[ 'Development Status", "classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience ::", "Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved ::", ":: 3.5', ], keywords='natural language processing, russian morphology, named entity", "import setup, find_packages setup( name='natasha', version='0.2.0', description='Named-entity recognition for russian", "packages=find_packages(), install_requires=[ 'yargy==0.3.0' ], extras_require={ 'web': [ 'ujson', 'aiohttp', ]," ]
[ "at_least_one_punctuation = False else: character = random.choice(valid_characters) password += character", "Python3; already using input pass letters = string.ascii_letters numbers =", "characters to meet the password length restriction. In addition, the", "# random.shuffle shuffles a list *in place* # random.shuffle(characters) #", "# # Start with a blank password and then go", "the password so we don't always get a # letter", "else: character = random.choice(valid_characters) password += character # # Finally,", "one of each of the selected # groups is chosen,", "and some # punctuation. # characters = list(password) # #", "return all the strings in (...) joined by X #", "string.punctuation def generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation): \"\"\"Generate a password by", "are lots of corner-cases. # password = \"\".join(characters) return password", "= False else: character = random.choice(valid_characters) password += character #", "\"\".join(characters) return password if __name__ == '__main__': password_length = int(input(\"How", "numbers = string.digits punctuation = string.punctuation def generate(password_length, at_least_one_letter, at_least_one_number,", "random characters to meet the password length restriction. In addition,", "we don't always get a # letter at the beginning,", "at_least_one_letter, at_least_one_number, at_least_one_punctuation): \"\"\"Generate a password by include enough random", "at_least_one_number: character = random.choice(numbers) at_least_one_number = False elif at_least_one_punctuation: character", "Python2 work like Python3 input = raw_input except NameError: #", "addition, the user can specify that at least one of", "valid_characters += letters if at_least_one_number: valid_characters += numbers if at_least_one_punctuation:", "of character be used. 
\"\"\" # # Any combination of", "return password if __name__ == '__main__': password_length = int(input(\"How many", "X # \", \".join(['Eggs', 'Bacon', 'Beans']) => \"Eggs, Bacon, Beans\"", "want to generate *real* .csv files, use the csv module", "one number [Y/n]? \").upper() or \"Y\") at_least_one_punctuation = \"Y\" ==", "elif at_least_one_number: character = random.choice(numbers) at_least_one_number = False elif at_least_one_punctuation:", "= list(password) # # random.shuffle shuffles a list *in place*", "\"\" if at_least_one_letter: valid_characters += letters if at_least_one_number: valid_characters +=", "one of the each of the classes of character be", "just choose randomly from all # groups. # if at_least_one_letter:", "'Bacon', 'Beans']) => \"Eggs, Bacon, Beans\" # But if you", "\").upper() or \"Y\") password = generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation) print(\"Your", "random.choice(numbers) at_least_one_number = False elif at_least_one_punctuation: character = random.choice(punctuation) at_least_one_punctuation", "already using input pass letters = string.ascii_letters numbers = string.digits", "times to make a password of the required length. #", "time around, ensure that one of each of the selected", "and then just choose randomly from all # groups. #", "letters = string.ascii_letters numbers = string.digits punctuation = string.punctuation def", "go round enough # times to make a password of", "random.shuffle(characters) # # X.join(...) means: return all the strings in", "if you want to generate *real* .csv files, use the", "password += character # # Finally, shuffle the password so", "# # X.join(...) means: return all the strings in (...)", "except NameError: # On Python3; already using input pass letters", "so we don't always get a # letter at the", "around, ensure that one of each of the selected #", "classes of character be used. 
\"\"\" # # Any combination", "make a password of the required length. # password =", "Python3 input = raw_input except NameError: # On Python3; already", "# random.shuffle(characters) # # X.join(...) means: return all the strings", "at_least_one_punctuation): \"\"\"Generate a password by include enough random characters to", "at_least_one_letter = False elif at_least_one_number: character = random.choice(numbers) at_least_one_number =", "randomly from all # groups. # if at_least_one_letter: character =", "or \"Y\") password = generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation) print(\"Your password", "one letter [Y/n]? \").upper() or \"Y\") at_least_one_number = \"Y\" ==", "at_least_one_letter: valid_characters += letters if at_least_one_number: valid_characters += numbers if", "punctuation # # Start with a blank password and then", "character # # Finally, shuffle the password so we don't", "'__main__': password_length = int(input(\"How many letters? \")) at_least_one_letter = \"Y\"", "the strings in (...) joined by X # \", \".join(['Eggs',", "of the classes of character be used. \"\"\" # #", "password = \"\".join(characters) return password if __name__ == '__main__': password_length", "generate *real* .csv files, use the csv module # because", "input pass letters = string.ascii_letters numbers = string.digits punctuation =", "(input(\"At least one punctuation [Y/n]? \").upper() or \"Y\") password =", "Bacon, Beans\" # But if you want to generate *real*", "def generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation): \"\"\"Generate a password by include", "import random import string try: # Make Python2 work like", "+= character # # Finally, shuffle the password so we", "choose randomly from all # groups. # if at_least_one_letter: character", "= random.choice(valid_characters) password += character # # Finally, shuffle the", "all # groups. 
# if at_least_one_letter: character = random.choice(letters) at_least_one_letter", "least one letter [Y/n]? \").upper() or \"Y\") at_least_one_number = \"Y\"", "least one number [Y/n]? \").upper() or \"Y\") at_least_one_punctuation = \"Y\"", "or \"Y\") at_least_one_punctuation = \"Y\" == (input(\"At least one punctuation", "restriction. In addition, the user can specify that at least", "there are lots of corner-cases. # password = \"\".join(characters) return", "+= punctuation # # Start with a blank password and", "False elif at_least_one_number: character = random.choice(numbers) at_least_one_number = False elif", "Finally, shuffle the password so we don't always get a", "# # Any combination of characters is valid # valid_characters", "= \"\" for i in range(password_length): # # Each time", "required length. # password = \"\" for i in range(password_length):", "at_least_one_number: valid_characters += numbers if at_least_one_punctuation: valid_characters += punctuation #", "\")) at_least_one_letter = \"Y\" == (input(\"At least one letter [Y/n]?", "string.digits punctuation = string.punctuation def generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation): \"\"\"Generate", "at_least_one_number = \"Y\" == (input(\"At least one number [Y/n]? \").upper()", "a # letter at the beginning, with a number after", "password_length = int(input(\"How many letters? \")) at_least_one_letter = \"Y\" ==", "== (input(\"At least one number [Y/n]? \").upper() or \"Y\") at_least_one_punctuation", "punctuation [Y/n]? \").upper() or \"Y\") password = generate(password_length, at_least_one_letter, at_least_one_number,", "# \", \".join(['Eggs', 'Bacon', 'Beans']) => \"Eggs, Bacon, Beans\" #", "[Y/n]? 
\").upper() or \"Y\") password = generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation)", "of the each of the classes of character be used.", "at_least_one_punctuation: character = random.choice(punctuation) at_least_one_punctuation = False else: character =", "punctuation = string.punctuation def generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation): \"\"\"Generate a", "(...) joined by X # \", \".join(['Eggs', 'Bacon', 'Beans']) =>", "sys import random import string try: # Make Python2 work", "# Make Python2 work like Python3 input = raw_input except", "# On Python3; already using input pass letters = string.ascii_letters", "valid # valid_characters = \"\" if at_least_one_letter: valid_characters += letters", "from all # groups. # if at_least_one_letter: character = random.choice(letters)", "False elif at_least_one_punctuation: character = random.choice(punctuation) at_least_one_punctuation = False else:", "[Y/n]? \").upper() or \"Y\") at_least_one_number = \"Y\" == (input(\"At least", "at_least_one_punctuation = \"Y\" == (input(\"At least one punctuation [Y/n]? \").upper()", "input = raw_input except NameError: # On Python3; already using", "list *in place* # random.shuffle(characters) # # X.join(...) means: return", "csv module # because there are lots of corner-cases. #", "ensure that one of each of the selected # groups", "valid_characters += numbers if at_least_one_punctuation: valid_characters += punctuation # #", "module # because there are lots of corner-cases. # password", "a list *in place* # random.shuffle(characters) # # X.join(...) 
means:", "like Python3 input = raw_input except NameError: # On Python3;", "chosen, and then just choose randomly from all # groups.", "user can specify that at least one of the each", "use the csv module # because there are lots of", "for i in range(password_length): # # Each time around, ensure", "\", \".join(['Eggs', 'Bacon', 'Beans']) => \"Eggs, Bacon, Beans\" # But", "letter [Y/n]? \").upper() or \"Y\") at_least_one_number = \"Y\" == (input(\"At", "joined by X # \", \".join(['Eggs', 'Bacon', 'Beans']) => \"Eggs,", "by X # \", \".join(['Eggs', 'Bacon', 'Beans']) => \"Eggs, Bacon,", "don't always get a # letter at the beginning, with", ".csv files, use the csv module # because there are", "\"Eggs, Bacon, Beans\" # But if you want to generate", "least one of the each of the classes of character", "= \"\" if at_least_one_letter: valid_characters += letters if at_least_one_number: valid_characters", "at_least_one_number = False elif at_least_one_punctuation: character = random.choice(punctuation) at_least_one_punctuation =", "# if at_least_one_letter: character = random.choice(letters) at_least_one_letter = False elif", "you want to generate *real* .csv files, use the csv", "# punctuation. # characters = list(password) # # random.shuffle shuffles", "length. # password = \"\" for i in range(password_length): #", "at least one of the each of the classes of", "False else: character = random.choice(valid_characters) password += character # #", "strings in (...) joined by X # \", \".join(['Eggs', 'Bacon',", "of corner-cases. # password = \"\".join(characters) return password if __name__", "*real* .csv files, use the csv module # because there", "by include enough random characters to meet the password length", "=> \"Eggs, Bacon, Beans\" # But if you want to", "enough # times to make a password of the required", "number after and some # punctuation. 
# characters = list(password)", "= False elif at_least_one_punctuation: character = random.choice(punctuation) at_least_one_punctuation = False", "many letters? \")) at_least_one_letter = \"Y\" == (input(\"At least one", "selected # groups is chosen, and then just choose randomly", "enough random characters to meet the password length restriction. In", "lots of corner-cases. # password = \"\".join(characters) return password if", "specify that at least one of the each of the", "= int(input(\"How many letters? \")) at_least_one_letter = \"Y\" == (input(\"At", "a password by include enough random characters to meet the", "NameError: # On Python3; already using input pass letters =", "groups. # if at_least_one_letter: character = random.choice(letters) at_least_one_letter = False", "at_least_one_number, at_least_one_punctuation): \"\"\"Generate a password by include enough random characters", "pass letters = string.ascii_letters numbers = string.digits punctuation = string.punctuation", "all the strings in (...) joined by X # \",", "characters = list(password) # # random.shuffle shuffles a list *in", "(input(\"At least one number [Y/n]? \").upper() or \"Y\") at_least_one_punctuation =", "= \"Y\" == (input(\"At least one punctuation [Y/n]? \").upper() or", "corner-cases. # password = \"\".join(characters) return password if __name__ ==", "letters if at_least_one_number: valid_characters += numbers if at_least_one_punctuation: valid_characters +=", "can specify that at least one of the each of", "at the beginning, with a number after and some #", "then go round enough # times to make a password", "= random.choice(numbers) at_least_one_number = False elif at_least_one_punctuation: character = random.choice(punctuation)", "import string try: # Make Python2 work like Python3 input", "number [Y/n]? 
\").upper() or \"Y\") at_least_one_punctuation = \"Y\" == (input(\"At", "character = random.choice(numbers) at_least_one_number = False elif at_least_one_punctuation: character =", "files, use the csv module # because there are lots", "# password = \"\" for i in range(password_length): # #", "Each time around, ensure that one of each of the", "password so we don't always get a # letter at", "random.choice(punctuation) at_least_one_punctuation = False else: character = random.choice(valid_characters) password +=", "place* # random.shuffle(characters) # # X.join(...) means: return all the", "if at_least_one_number: valid_characters += numbers if at_least_one_punctuation: valid_characters += punctuation", "*in place* # random.shuffle(characters) # # X.join(...) means: return all", "= \"Y\" == (input(\"At least one letter [Y/n]? \").upper() or", "# # Finally, shuffle the password so we don't always", "random import string try: # Make Python2 work like Python3", "the each of the classes of character be used. \"\"\"", "numbers if at_least_one_punctuation: valid_characters += punctuation # # Start with", "\"Y\") password = generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation) print(\"Your password is:", "valid_characters += punctuation # # Start with a blank password", "string.ascii_letters numbers = string.digits punctuation = string.punctuation def generate(password_length, at_least_one_letter,", "But if you want to generate *real* .csv files, use", "elif at_least_one_punctuation: character = random.choice(punctuation) at_least_one_punctuation = False else: character", "least one punctuation [Y/n]? \").upper() or \"Y\") password = generate(password_length,", "\"Y\" == (input(\"At least one letter [Y/n]? \").upper() or \"Y\")", "raw_input except NameError: # On Python3; already using input pass", "of the required length. 
# password = \"\" for i", "in range(password_length): # # Each time around, ensure that one", "beginning, with a number after and some # punctuation. #", "the user can specify that at least one of the", "(input(\"At least one letter [Y/n]? \").upper() or \"Y\") at_least_one_number =", "# Start with a blank password and then go round", "On Python3; already using input pass letters = string.ascii_letters numbers", "that at least one of the each of the classes", "letters? \")) at_least_one_letter = \"Y\" == (input(\"At least one letter", "\"Y\") at_least_one_number = \"Y\" == (input(\"At least one number [Y/n]?", "== (input(\"At least one punctuation [Y/n]? \").upper() or \"Y\") password", "# groups is chosen, and then just choose randomly from", "== (input(\"At least one letter [Y/n]? \").upper() or \"Y\") at_least_one_number", "letter at the beginning, with a number after and some", "get a # letter at the beginning, with a number", "= string.ascii_letters numbers = string.digits punctuation = string.punctuation def generate(password_length,", "used. \"\"\" # # Any combination of characters is valid", "# But if you want to generate *real* .csv files,", "Any combination of characters is valid # valid_characters = \"\"", "= string.punctuation def generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation): \"\"\"Generate a password", "the required length. # password = \"\" for i in", "string try: # Make Python2 work like Python3 input =", "some # punctuation. # characters = list(password) # # random.shuffle", "# characters = list(password) # # random.shuffle shuffles a list", "character be used. 
\"\"\" # # Any combination of characters", "try: # Make Python2 work like Python3 input = raw_input", "\").upper() or \"Y\") at_least_one_punctuation = \"Y\" == (input(\"At least one", "= string.digits punctuation = string.punctuation def generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation):", "password and then go round enough # times to make", "a password of the required length. # password = \"\"", "blank password and then go round enough # times to", "random.choice(valid_characters) password += character # # Finally, shuffle the password", "character = random.choice(letters) at_least_one_letter = False elif at_least_one_number: character =", "# times to make a password of the required length.", "\"Y\") at_least_one_punctuation = \"Y\" == (input(\"At least one punctuation [Y/n]?", "password if __name__ == '__main__': password_length = int(input(\"How many letters?", "shuffles a list *in place* # random.shuffle(characters) # # X.join(...)", "punctuation. # characters = list(password) # # random.shuffle shuffles a", "is valid # valid_characters = \"\" if at_least_one_letter: valid_characters +=", "Start with a blank password and then go round enough", "# # Each time around, ensure that one of each", "that one of each of the selected # groups is", "at_least_one_punctuation: valid_characters += punctuation # # Start with a blank", "= \"Y\" == (input(\"At least one number [Y/n]? \").upper() or", "and then go round enough # times to make a", "or \"Y\") at_least_one_number = \"Y\" == (input(\"At least one number", "import os, sys import random import string try: # Make", "Make Python2 work like Python3 input = raw_input except NameError:", "password length restriction. 
In addition, the user can specify that", "if at_least_one_letter: valid_characters += letters if at_least_one_number: valid_characters += numbers", "a blank password and then go round enough # times", "generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation): \"\"\"Generate a password by include enough", "the classes of character be used. \"\"\" # # Any", "combination of characters is valid # valid_characters = \"\" if", "+= letters if at_least_one_number: valid_characters += numbers if at_least_one_punctuation: valid_characters", "X.join(...) means: return all the strings in (...) joined by", "because there are lots of corner-cases. # password = \"\".join(characters)", "with a blank password and then go round enough #", "work like Python3 input = raw_input except NameError: # On", "# # random.shuffle shuffles a list *in place* # random.shuffle(characters)", "random.choice(letters) at_least_one_letter = False elif at_least_one_number: character = random.choice(numbers) at_least_one_number", "if at_least_one_punctuation: valid_characters += punctuation # # Start with a", "in (...) joined by X # \", \".join(['Eggs', 'Bacon', 'Beans'])", "each of the classes of character be used. \"\"\" #", "# because there are lots of corner-cases. # password =", "\"\" for i in range(password_length): # # Each time around,", "be used. \"\"\" # # Any combination of characters is", "to meet the password length restriction. In addition, the user", "'Beans']) => \"Eggs, Bacon, Beans\" # But if you want", "length restriction. 
In addition, the user can specify that at", "+= numbers if at_least_one_punctuation: valid_characters += punctuation # # Start", "i in range(password_length): # # Each time around, ensure that", "= random.choice(punctuation) at_least_one_punctuation = False else: character = random.choice(valid_characters) password", "\".join(['Eggs', 'Bacon', 'Beans']) => \"Eggs, Bacon, Beans\" # But if", "= \"\".join(characters) return password if __name__ == '__main__': password_length =", "meet the password length restriction. In addition, the user can", "if at_least_one_letter: character = random.choice(letters) at_least_one_letter = False elif at_least_one_number:", "at_least_one_letter = \"Y\" == (input(\"At least one letter [Y/n]? \").upper()", "# letter at the beginning, with a number after and", "then just choose randomly from all # groups. # if", "password = \"\" for i in range(password_length): # # Each", "\"Y\" == (input(\"At least one number [Y/n]? \").upper() or \"Y\")", "password = generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation) print(\"Your password is: {}\".format(password))", "random.shuffle shuffles a list *in place* # random.shuffle(characters) # #", "using input pass letters = string.ascii_letters numbers = string.digits punctuation", "password by include enough random characters to meet the password", "to make a password of the required length. # password", "os, sys import random import string try: # Make Python2", "\"\"\"Generate a password by include enough random characters to meet", "__name__ == '__main__': password_length = int(input(\"How many letters? \")) at_least_one_letter", "password of the required length. 
# password = \"\" for", "= False elif at_least_one_number: character = random.choice(numbers) at_least_one_number = False", "# Any combination of characters is valid # valid_characters =", "include enough random characters to meet the password length restriction.", "the csv module # because there are lots of corner-cases.", "\"Y\" == (input(\"At least one punctuation [Y/n]? \").upper() or \"Y\")", "[Y/n]? \").upper() or \"Y\") at_least_one_punctuation = \"Y\" == (input(\"At least", "at_least_one_letter: character = random.choice(letters) at_least_one_letter = False elif at_least_one_number: character", "a number after and some # punctuation. # characters =", "if __name__ == '__main__': password_length = int(input(\"How many letters? \"))", "of characters is valid # valid_characters = \"\" if at_least_one_letter:", "# Each time around, ensure that one of each of", "the selected # groups is chosen, and then just choose", "shuffle the password so we don't always get a #", "list(password) # # random.shuffle shuffles a list *in place* #", "character = random.choice(punctuation) at_least_one_punctuation = False else: character = random.choice(valid_characters)", "to generate *real* .csv files, use the csv module #", "\"\"\" # # Any combination of characters is valid #", "is chosen, and then just choose randomly from all #", "always get a # letter at the beginning, with a", "# Finally, shuffle the password so we don't always get", "== '__main__': password_length = int(input(\"How many letters? 
\")) at_least_one_letter =", "= random.choice(letters) at_least_one_letter = False elif at_least_one_number: character = random.choice(numbers)", "Beans\" # But if you want to generate *real* .csv", "\").upper() or \"Y\") at_least_one_number = \"Y\" == (input(\"At least one", "round enough # times to make a password of the", "# password = \"\".join(characters) return password if __name__ == '__main__':", "character = random.choice(valid_characters) password += character # # Finally, shuffle", "the beginning, with a number after and some # punctuation.", "range(password_length): # # Each time around, ensure that one of", "groups is chosen, and then just choose randomly from all", "with a number after and some # punctuation. # characters", "# valid_characters = \"\" if at_least_one_letter: valid_characters += letters if", "of each of the selected # groups is chosen, and", "int(input(\"How many letters? \")) at_least_one_letter = \"Y\" == (input(\"At least", "means: return all the strings in (...) joined by X", "of the selected # groups is chosen, and then just", "after and some # punctuation. # characters = list(password) #", "the password length restriction. In addition, the user can specify", "valid_characters = \"\" if at_least_one_letter: valid_characters += letters if at_least_one_number:", "characters is valid # valid_characters = \"\" if at_least_one_letter: valid_characters", "In addition, the user can specify that at least one", "one punctuation [Y/n]? \").upper() or \"Y\") password = generate(password_length, at_least_one_letter,", "# groups. # if at_least_one_letter: character = random.choice(letters) at_least_one_letter =", "= raw_input except NameError: # On Python3; already using input", "each of the selected # groups is chosen, and then", "# X.join(...) means: return all the strings in (...) joined" ]
[ "'active' == remote_status: churned_in.append({ \"address\": validator['node_address'], \"bond\": validator['bond'] }) elif", "couldn't get the node accounts while checking version status.\") return", "if was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_health_warning_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = False return False", "chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] # If not initialized", "= 'The Node is behind the latest block height and", "node_address): check_thorchain_block_height(context, node_address=node_address) check_thorchain_catch_up_status(context, node_address=node_address) check_thorchain_midgard_api(context, node_address=node_address) for node_address in", "last_newest_version is None or version.parse( highest_version) > version.parse(last_newest_version): chat_data['newest_software_version'] =", "get_latest_block_height(node_data['ip_address']) except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error with {node_data['ip_address']}\")", "= block_height <= node_data.setdefault('block_height', 0) block_height_stuck_count = node_data.setdefault(\"block_height_stuck_count\", 0) if", "context.bot_data.setdefault(\"insolvency_count\", 0) message = None if not is_solvent: insolvency_count +=", "again! 
👌' + '\\n' + \\ 'IP: ' + node_data['ip_address']", "0: text = f\"THORNode: {local_node['alias']}\\n\" \\ f\"Address: {local_node['node_address']}\\n\" \\ f\"Status:", "\"⚖️ Bonded/Staked Ratio: *\" + '{:.2f}'.format( int(get_network_security_ratio(network) * 100)) +", "Yggdrasil.\") return None except Exception as e: logger.exception(e) return None", "f\"\\n\\n🔐 Vault Addresses:\" if \"Vault Addresses\" not in text else", "of the nodes already runs on *{highest_version}*\" try_message_with_home_menu( context, chat_id=context.job.context['chat_id'],", "+ '{:.2f}'.format( float(network['bondingAPY']) * 100) + \" %* APY\\n\\n\" \\", "\\ 'Node address: ' + node_address + '\\n' + \\", "Bonded/Staked Ratio: *\" + '{:.2f}'.format( int(get_network_security_ratio(network) * 100)) + \"", "or Connection error with {node_data['ip_address']}\") return is_stuck = block_height <=", "ConnectionError): logger.warning(f\"Timeout or Connection error while querying Asgard and Yggdrasil.\")", "!= context.bot_data['vault_addresses'][chain['chain']]: text += f\"\\n\\n🔐 Vault Addresses:\" if \"Vault Addresses\"", "version.parse( highest_version) > version.parse(last_newest_version): chat_data['newest_software_version'] = highest_version for node in", "👌\\n\" + \\ f\"IP: {node_data['ip_address']}\\n\" + \\ f\"THORNode: {node_data['alias']}\\n\" +", "block height and catching up! 💀 ' + '\\n' +", "was healhty. 
if \"healthy\" not in context.job.context['chat_data']['nodes'][node_address]: context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True", "datetime.timestamp( datetime.now() - timedelta(seconds=local_node['notification_timeout_in_seconds'])) if is_not_blocked: message = build_notification_message_for_active_node(local_node, remote_node,", "+ block_height + '\\n\\n' + \\ 'Please check your Thornode", "' + block_height + '\\n\\n' + \\ 'Please check your", "in context.bot_data: context.bot_data['node_statuses'] = {} for validator in validators: context.bot_data['node_statuses'][", "(slash_point_change <= threshold): return None if len(changed_fields) > 0: text", "node_data['block_height'] = block_height node_data[\"block_height_stuck_count\"] = block_height_stuck_count def check_solvency_job(context): message =", "else \"\" text += f\"\\n*{chain['chain']}*: \\n\" \\ f\"Old Vault address:", "ok \"\"\" chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] was_healthy =", "= context.bot_data['node_statuses'] churned_in = [] churned_out = [] highest_churn_status_since =", "= context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] if 'is_catching_up' not in node_data:", "text += \"\\nNodes Removed:\\n\" if len(churned_out) else \"\" for node", "import version from service.utils import * def check_thornodes(context): chat_id =", "{local_node['alias']}\\n\" \\ f\"Address: {local_node['node_address']}\\n\" \\ f\"Status: {local_node['status'].capitalize()}\" if 'status' in", "changed_fields) and (slash_point_change <= threshold): return None if len(changed_fields) >", "height now at: {block_height}\\n\" try_message_with_home_menu(context=context, chat_id=chat_id, text=text) block_height_stuck_count = 0", "\\n\" \\ f\"Old Vault address: {context.bot_data['vault_addresses'][chain['chain']]}\\n\"\\ f\"⬇️\\n\" \\ f\"New Vault", "if message: # Update data 
local_node['status'] = remote_node['status'] local_node['bond'] =", "\"address\": validator['node_address'], \"bond\": validator['bond'] }) elif 'active' == local_status: churned_out.append({", "'{:,}'.format(int(remote_node['slash_points'])) return text else: return None def check_versions_status(context): chat_data =", "INITIAL_NOTIFICATION_TIMEOUT if local_node['status'].upper() in MONITORED_STATUSES and is_thornode_healthy(context, node_address): check_thorchain_block_height(context, node_address=node_address)", "+ \\ 'Current block height: ' + block_height try_message_with_home_menu(context=context, chat_id=chat_id,", "is_stuck = block_height <= node_data.setdefault('block_height', 0) block_height_stuck_count = node_data.setdefault(\"block_height_stuck_count\", 0)", "is_stuck: block_height_stuck_count += 1 if block_height_stuck_count == 1: text =", "increasing anymore! 💀' + '\\n' + \\ 'IP: ' +", "💀' + '\\n' + \\ 'Address: ' + node_address +", "healthy again! 👌' + '\\n' + \\ 'IP: ' +", "\"↩️ Liquidity ROI: *\" + '{:.2f}'.format( float(network['liquidityAPY']) * 100) +", "context.job.context['chat_data']['nodes'][node_address] # If not initialized assuming node was healhty. 
if", "threshold = get_slash_points_threshold(context) slash_point_change = abs(int(local_node['slash_points']) - int(remote_node['slash_points'])) if (len(changed_fields)", "'Node address: ' + node_address try_message_with_home_menu(context, chat_id=chat_id, text=text) else: text", "if validator[ 'node_address'] in local_node_statuses else \"unknown\" if int(validator['status_since']) >", "'slash_points' in changed_fields: text += ' ➡️ ' + '{:,}'.format(int(remote_node['slash_points']))", "build_notification_message_for_active_node(local_node, remote_node, context) -> [str, None]: changed_fields = [ field", "Update data local_node['status'] = remote_node['status'] local_node['bond'] = remote_node['bond'] local_node['slash_points'] =", "local_node['bond'] = remote_node['bond'] local_node['slash_points'] = remote_node['slash_points'] local_node['ip_address'] = remote_node['ip_address'] local_node['last_notification_timestamp']", "f\"\\nBond: {tor_to_rune(int(local_node['bond']))}\" if 'bond' in changed_fields: text += f\" ➡️", "'THORChain is *100% solvent* again! 👌\\n' insolvency_count = 0 context.bot_data[\"insolvency_count\"]", "\"unknown\" if remote_status != local_status: if 'active' == remote_status: churned_in.append({", "= node_data[\"healthy\"] try: # Check whether node answers. 
If it", "context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] try: block_height = get_latest_block_height(node_data['ip_address']) except (Timeout,", "100) + \" %* APY\" context.bot_data.setdefault(\"vault_addresses\", {}) current_chains = get_pool_addresses_from_any_node()", "handlers.chat_helpers import try_message_with_home_menu, try_message_to_all_users from packaging import version from service.utils", "'THORNode ' + local_node['alias'] + ' is not active anymore!", "on *{highest_version}*\" try_message_with_home_menu( context, chat_id=context.job.context['chat_id'], text=message) def check_churning(context): try: validators", "churned_out: text += f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text += \"\\nSystem:\\n\" try: network", "block_height = \"currently unavailable\" if is_currently_catching_up: node_data['is_catching_up'] = True text", "else: text += \"\\n\\n⚠️ 🚨 CHURNING BUT THE VAULT ADDRESSES", "del chat_data['nodes'][node_address] def build_notification_message_for_active_node(local_node, remote_node, context) -> [str, None]: changed_fields", "1: text = 'Block height is not increasing anymore! 
💀'", "'Address: ' + node_address + '\\n\\n' + \\ 'Please enter", "None is_solvent = asgard_solvency['is_solvent'] and yggdrasil_solvency['is_solvent'] insolvency_count = context.bot_data.setdefault(\"insolvency_count\", 0)", "def did_churn_happen(validator, local_node_statuses, highest_churn_status_since) -> bool: remote_status = validator['status'] local_status", "\"\\nNodes Removed:\\n\" if len(churned_out) else \"\" for node in churned_out:", "THE VAULT ADDRESSES DID NOT CHANGE 🚨\\n\" context.bot_data['vault_addresses'][chain['chain']] = chain['address']", "couldn't get the node accounts while checking if churning occurred.\")", "node_data = context.job.context['chat_data']['nodes'][node_address] try: block_height = get_latest_block_height(node_data['ip_address']) except (Timeout, ConnectionError):", "'Block height is not increasing anymore! 💀' + '\\n' +", "None]: try: asgard_solvency = asgard_solvency_check() yggdrasil_solvency = yggdrasil_solvency_check() except (Timeout,", "Connection error with {node_data['ip_address']}\") block_height = \"currently unavailable\" if is_currently_catching_up:", "' + node_address + '\\n' + \\ 'Block height stuck", "text=text) else: text = 'Midgard API is not healthy anymore!", "local_node_statuses[validator['node_address']] if validator[ 'node_address'] in local_node_statuses else \"unknown\" if int(validator['status_since'])", "if node is some blocks behind with catch up status", "== 'active' and remote_status == 'standby')): return True return False", "<reponame>block42-blockchain-company/thornode-telegram-bot from constants.messages import get_node_health_warning_message, get_node_healthy_again_message from handlers.chat_helpers import try_message_with_home_menu,", "{tor_to_rune(int(remote_node['bond']))}\" text += '\\nSlash Points: ' + '{:,}'.format(int(local_node['slash_points'])) if 'slash_points'", "local_node_statuses else \"unknown\" if int(validator['status_since']) > highest_churn_status_since and \\ 
((local_status", "block_height node_data[\"block_height_stuck_count\"] = block_height_stuck_count def check_solvency_job(context): message = check_solvency(context) if", "context) if message: # Update data local_node['status'] = remote_node['status'] local_node['bond']", "not was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_healthy_again_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True return True", "= \"🔄 CHURN SUMMARY\\n\" \\ \"THORChain has successfully churned:\\n\\n\" text", "try: block_height = get_latest_block_height(node_data['ip_address']) except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection", "text=text) block_height_stuck_count = 0 node_data['block_height'] = block_height node_data[\"block_height_stuck_count\"] = block_height_stuck_count", "e: logger.exception(e) return None is_solvent = asgard_solvency['is_solvent'] and yggdrasil_solvency['is_solvent'] insolvency_count", "e: logger.exception(e) try_message_to_all_users(context, text=text) for validator in validators: context.bot_data['node_statuses'][ validator['node_address']]", "= node_data.setdefault('is_midgard_healthy', True) is_midgard_healthy = is_midgard_api_healthy(node_data['ip_address']) if was_healthy != is_midgard_healthy:", "'\\n' + \\ 'Current block height: ' + block_height try_message_with_home_menu(context=context,", "text += f\"\\n*{chain['chain']}*: \\n\" \\ f\"Old Vault address: {context.bot_data['vault_addresses'][chain['chain']]}\\n\"\\ f\"⬇️\\n\"", "try_message_to_all_users from packaging import version from service.utils import * def", "➡️ ' + '{:,}'.format(int(remote_node['slash_points'])) return text else: return None def", "in text else \"\" text += f\"\\n*{chain['chain']}*: \\n\" \\ f\"Old", "or ( local_status == 'active' and remote_status == 'standby')): return", "message = f\"Consider updating the software on your node: *{node['alias']}*", "Vault address: 
{context.bot_data['vault_addresses'][chain['chain']]}\\n\"\\ f\"⬇️\\n\" \\ f\"New Vault address: {chain['address']}\\n\" else:", "False def check_thorchain_block_height(context, node_address): chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address]", "funds*! 💀\\n\\n' message += get_insolvent_balances_message(asgard_solvency, yggdrasil_solvency) else: if insolvency_count >=", "except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error with {node_data['ip_address']}\") block_height", "+ '\\n' + \\ 'Current block height: ' + block_height", "for node in churned_in: text += f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text +=", "!= is_midgard_healthy: if is_midgard_healthy: text = 'Midgard API is healthy", "and catching up! 💀 ' + '\\n' + \\ 'IP:", "f\"but one of the nodes already runs on *{highest_version}*\" try_message_with_home_menu(", "healthy anymore! 💀' + '\\n' + \\ 'IP: ' +", "and yggdrasil_solvency['is_solvent'] insolvency_count = context.bot_data.setdefault(\"insolvency_count\", 0) message = None if", "{local_node['node_address']}\\n\" \\ f\"Status: {local_node['status'].capitalize()}\" if 'status' in changed_fields: text +=", "active anymore! 
💀' + '\\n' + \\ 'Address: ' +", "text = \"🔄 CHURN SUMMARY\\n\" \\ \"THORChain has successfully churned:\\n\\n\"", "except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error with {node_data['ip_address']}\") return", "Exception as e: logger.exception(e) return None is_solvent = asgard_solvency['is_solvent'] and", "threshold): return None if len(changed_fields) > 0: text = f\"THORNode:", "runs on *{highest_version}*\" try_message_with_home_menu( context, chat_id=context.job.context['chat_id'], text=message) def check_churning(context): try:", "{} for validator in validators: context.bot_data['node_statuses'][ validator['node_address']] = validator['status'] return", "local_node_statuses = context.bot_data['node_statuses'] churned_in = [] churned_out = [] highest_churn_status_since", "{context.bot_data['vault_addresses'][chain['chain']]}\\n\"\\ f\"⬇️\\n\" \\ f\"New Vault address: {chain['address']}\\n\" else: text +=", "'Node address: ' + node_address + '\\n\\n' + \\ 'Please", "in context.bot_data['vault_addresses']: if chain['address'] != context.bot_data['vault_addresses'][chain['chain']]: text += f\"\\n\\n🔐 Vault", "+ node_address + '\\n' + \\ 'Current block height: '", "validator[ 'node_address'] in local_node_statuses else \"unknown\" if remote_status != local_status:", "💀' + '\\n' + \\ 'IP: ' + node_data['ip_address'] +", "DID NOT CHANGE 🚨\\n\" context.bot_data['vault_addresses'][chain['chain']] = chain['address'] except Exception as", "= get_slash_points_threshold(context) slash_point_change = abs(int(local_node['slash_points']) - int(remote_node['slash_points'])) if (len(changed_fields) <=", "text else \"\" text += f\"\\n*{chain['chain']}*: \\n\" \\ f\"Old Vault", "'The node caught up to the latest block height again!", "(len(changed_fields) <= 1) and ('slash_points' in changed_fields) and (slash_point_change <=", "= remote_node['bond'] local_node['slash_points'] = remote_node['slash_points'] local_node['ip_address'] = 
remote_node['ip_address'] local_node['last_notification_timestamp'] =", "as e: logger.exception(e) continue if remote_node is None: text =", "def check_versions_status(context): chat_data = context.job.context['chat_data'] try: node_accounts = get_node_accounts() except", "elif 'active' == local_status: churned_out.append({ \"address\": validator['node_address'], \"bond\": validator['bond'] })", "we get an Exception. get_latest_block_height(node_data['ip_address']) if not was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id,", "is behind the latest block height and catching up! 💀", "Check that Midgard API is ok \"\"\" chat_id = context.job.context['chat_id']", "*{tor_to_rune(node['bond'])}*\\n\" text += \"\\nNodes Removed:\\n\" if len(churned_out) else \"\" for", "is_midgard_healthy: if is_midgard_healthy: text = 'Midgard API is healthy again!", "= local_node_statuses[validator['node_address']] if validator[ 'node_address'] in local_node_statuses else \"unknown\" if", "0 context.bot_data[\"insolvency_count\"] = insolvency_count return message def check_thorchain_catch_up_status(context, node_address): \"\"\"", "highest_churn_status_since = int(validator['status_since']) for validator in validators: remote_status = validator['status']", "node_address): chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] try: block_height =", "if is_stuck: block_height_stuck_count += 1 if block_height_stuck_count == 1: text", "If it doesn't we get an Exception. 
get_latest_block_height(node_data['ip_address']) if not", "version is *{node['version']}* \" \\ f\"but one of the nodes", "try_message_with_home_menu(context=context, chat_id=chat_id, text=text) block_height_stuck_count = 0 node_data['block_height'] = block_height node_data[\"block_height_stuck_count\"]", "= f\"THORNode: {local_node['alias']}\\n\" \\ f\"Address: {local_node['node_address']}\\n\" \\ f\"Status: {local_node['status'].capitalize()}\" if", "return text else: return None def check_versions_status(context): chat_data = context.job.context['chat_data']", "already runs on *{highest_version}*\" try_message_with_home_menu( context, chat_id=context.job.context['chat_id'], text=message) def check_churning(context):", "context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True return True except (Timeout, ConnectionError, BadStatusException, Exception):", "was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_health_warning_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = False return False def", "\\ 'Please enter another THORNode address.' inactive_nodes.append(node_address) try_message_with_home_menu(context=context, chat_id=chat_id, text=text)", "in chat_data.get('nodes', {}).values(): if version.parse(node['version']) < version.parse(highest_version): message = f\"Consider", "text += '\\nSlash Points: ' + '{:,}'.format(int(local_node['slash_points'])) if 'slash_points' in", "{local_node['status'].capitalize()}\" if 'status' in changed_fields: text += f' ➡️ {remote_node[\"status\"].capitalize()}'", "= context.job.context['chat_data']['nodes'][node_address] if 'is_catching_up' not in node_data: node_data['is_catching_up'] = False", "node_address + '\\n' + \\ 'Current block height: ' +", "'Current block height: ' + block_height + '\\n\\n' + \\", "text = f\"Block height is increasing again! 👌\\n\" + \\", "text = 'Midgard API is not healthy anymore! 
💀' +", "node_data['is_catching_up'] = False text = 'The node caught up to", "\"currently unavailable\" if is_currently_catching_up: node_data['is_catching_up'] = True text = 'The", "+ '{:.2f}'.format( float(network['liquidityAPY']) * 100) + \" %* APY\" context.bot_data.setdefault(\"vault_addresses\",", "{chain['address']}\\n\" else: text += \"\\n\\n⚠️ 🚨 CHURNING BUT THE VAULT", "{node_data['ip_address']}\") block_height = \"currently unavailable\" if is_currently_catching_up: node_data['is_catching_up'] = True", "if last_newest_version is None or version.parse( highest_version) > version.parse(last_newest_version): chat_data['newest_software_version']", "address: {context.bot_data['vault_addresses'][chain['chain']]}\\n\"\\ f\"⬇️\\n\" \\ f\"New Vault address: {chain['address']}\\n\" else: text", "was_healthy = node_data[\"healthy\"] try: # Check whether node answers. If", "querying Asgard and Yggdrasil.\") return None except Exception as e:", "else: local_node['notification_timeout_in_seconds'] = INITIAL_NOTIFICATION_TIMEOUT if local_node['status'].upper() in MONITORED_STATUSES and is_thornode_healthy(context,", "= 'THORChain is *missing funds*! 💀\\n\\n' message += get_insolvent_balances_message(asgard_solvency, yggdrasil_solvency)", "= context.job.context['chat_data'] inactive_nodes = [] for node_address, local_node in chat_data.get('nodes',", "* 100) + \" %* APY\\n\\n\" \\ \"↩️ Liquidity ROI:", "{node_address}\\n\" + \\ f\"Block height now at: {block_height}\\n\" try_message_with_home_menu(context=context, chat_id=chat_id,", "the latest block height again! 👌' + '\\n' + \\", "*100% solvent* again! 
👌\\n' insolvency_count = 0 context.bot_data[\"insolvency_count\"] = insolvency_count", "= {} for validator in validators: context.bot_data['node_statuses'][ validator['node_address']] = validator['status']", "'is_catching_up' not in node_data: node_data['is_catching_up'] = False try: is_currently_catching_up =", "for node_address, local_node in chat_data.get('nodes', {}).items(): try: remote_node = get_thornode_object_or_none(address=node_address)", "'active') or ( local_status == 'active' and remote_status == 'standby')):", "validator['node_address'], \"bond\": validator['bond'] }) elif 'active' == local_status: churned_out.append({ \"address\":", "node_address=node_address) check_thorchain_catch_up_status(context, node_address=node_address) check_thorchain_midgard_api(context, node_address=node_address) for node_address in inactive_nodes: del", "get_node_healthy_again_message from handlers.chat_helpers import try_message_with_home_menu, try_message_to_all_users from packaging import version", "def check_solvency_job(context): message = check_solvency(context) if message: try_message_to_all_users(context, text=message) def", "logger.warning(f\"Timeout or Connection error with {node_data['ip_address']}\") block_height = \"currently unavailable\"", "is not healthy anymore! 💀' + '\\n' + \\ 'IP:", "updating the software on your node: *{node['alias']}* ‼️\\n\" \\ f\"Your", "last_newest_version = chat_data.get('newest_software_version', None) if last_newest_version is None or version.parse(", "= is_midgard_api_healthy(node_data['ip_address']) if was_healthy != is_midgard_healthy: if is_midgard_healthy: text =", "'Please enter another THORNode address.' 
inactive_nodes.append(node_address) try_message_with_home_menu(context=context, chat_id=chat_id, text=text) continue", "in changed_fields: text += f' ➡️ {remote_node[\"status\"].capitalize()}' text += f\"\\nBond:", "'Block height stuck at: ' + block_height + '\\n\\n' +", "<= threshold): return None if len(changed_fields) > 0: text =", "context.bot_data['node_statuses'][ validator['node_address']] = validator['status'] return local_node_statuses = context.bot_data['node_statuses'] churned_in =", "= context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] # If not initialized assuming", "doesn't we get an Exception. get_latest_block_height(node_data['ip_address']) if not was_healthy: try_message_with_home_menu(context=context,", "+= f\"\\nBond: {tor_to_rune(int(local_node['bond']))}\" if 'bond' in changed_fields: text += f\"", "\"\" text += f\"\\n*{chain['chain']}*: \\n\" \\ f\"Old Vault address: {context.bot_data['vault_addresses'][chain['chain']]}\\n\"\\", "float(network['bondingAPY']) * 100) + \" %* APY\\n\\n\" \\ \"↩️ Liquidity", "False def is_thornode_healthy(context, node_address) -> bool: chat_id = context.job.context['chat_id'] node_data", "\\ f\"Address: {local_node['node_address']}\\n\" \\ f\"Status: {local_node['status'].capitalize()}\" if 'status' in changed_fields:", "another THORNode address.' 
inactive_nodes.append(node_address) try_message_with_home_menu(context=context, chat_id=chat_id, text=text) continue is_not_blocked =", "Bond: *{tor_to_rune(network['bondMetrics']['totalActiveBond'])}* (total)\\n\\n\" \\ \"⚖️ Bonded/Staked Ratio: *\" + '{:.2f}'.format(", "check_thorchain_midgard_api(context, node_address): \"\"\" Check that Midgard API is ok \"\"\"", "in validators: remote_status = validator['status'] local_status = local_node_statuses[ validator['node_address']] if", "+ '\\n' + \\ 'THORNode: ' + node_data['alias'] + '\\n'", "None) if last_newest_version is None or version.parse( highest_version) > version.parse(last_newest_version):", "0) if is_stuck: block_height_stuck_count += 1 if block_height_stuck_count == 1:", "local_node['last_notification_timestamp'] = datetime.timestamp(datetime.now()) local_node['notification_timeout_in_seconds'] *= NOTIFICATION_TIMEOUT_MULTIPLIER try_message_with_home_menu(context=context, chat_id=chat_id, text=message) else:", "block_height try_message_with_home_menu(context=context, chat_id=chat_id, text=text) def check_thorchain_midgard_api(context, node_address): \"\"\" Check that", "= remote_node['slash_points'] local_node['ip_address'] = remote_node['ip_address'] local_node['last_notification_timestamp'] = datetime.timestamp(datetime.now()) local_node['notification_timeout_in_seconds'] *=", "'The Node is behind the latest block height and catching", "context.job.context['chat_data'] inactive_nodes = [] for node_address, local_node in chat_data.get('nodes', {}).items():", "chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] was_healthy = node_data.setdefault('is_midgard_healthy', True)", "node_data.setdefault('is_midgard_healthy', True) is_midgard_healthy = is_midgard_api_healthy(node_data['ip_address']) if was_healthy != is_midgard_healthy: if", "inactive_nodes: del chat_data['nodes'][node_address] def build_notification_message_for_active_node(local_node, 
remote_node, context) -> [str, None]:", "for validator in validators: context.bot_data['node_statuses'][ validator['node_address']] = validator['status'] def did_churn_happen(validator,", "%*\\n\\n\" \\ \"↩️ Bonding ROI: *\" + '{:.2f}'.format( float(network['bondingAPY']) *", "* def check_thornodes(context): chat_id = context.job.context['chat_id'] chat_data = context.job.context['chat_data'] inactive_nodes", "\\ \"THORChain has successfully churned:\\n\\n\" text += \"Nodes Added:\\n\" if", "+ '\\n' + \\ 'Block height stuck at: ' +", "if block_height_stuck_count >= 1: text = f\"Block height is increasing", "service.utils import * def check_thornodes(context): chat_id = context.job.context['chat_id'] chat_data =", "\\ 'Current block height: ' + block_height + '\\n\\n' +", "validators = get_node_accounts() except Exception as e: logger.exception(e) logger.error(\"I couldn't", "+ '\\n\\n' + \\ 'Please check your Thornode immediately!' try_message_with_home_menu(context=context,", "error while querying Asgard and Yggdrasil.\") return None except Exception", "' + node_address try_message_with_home_menu(context, chat_id=chat_id, text=text) else: text = 'Midgard", "= validator['status'] local_status = local_node_statuses[validator['node_address']] if validator[ 'node_address'] in local_node_statuses", "and ('slash_points' in changed_fields) and (slash_point_change <= threshold): return None", "in changed_fields: text += ' ➡️ ' + '{:,}'.format(int(remote_node['slash_points'])) return", "f\"Old Vault address: {context.bot_data['vault_addresses'][chain['chain']]}\\n\"\\ f\"⬇️\\n\" \\ f\"New Vault address: {chain['address']}\\n\"", "== remote_status: churned_in.append({ \"address\": validator['node_address'], \"bond\": validator['bond'] }) elif 'active'", "node was healhty. if \"healthy\" not in context.job.context['chat_data']['nodes'][node_address]: context.job.context['chat_data']['nodes'][node_address][\"healthy\"] =", "'Please check your Thornode immediately!' 
try_message_with_home_menu(context, chat_id=chat_id, text=text) node_data['is_midgard_healthy'] =", "Addresses:\" if \"Vault Addresses\" not in text else \"\" text", "logger.exception(e) return None is_solvent = asgard_solvency['is_solvent'] and yggdrasil_solvency['is_solvent'] insolvency_count =", "else \"\" for node in churned_out: text += f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\"", "= True text = 'The Node is behind the latest", "\\ f\"💚 Total Active Bond: *{tor_to_rune(network['bondMetrics']['totalActiveBond'])}* (total)\\n\\n\" \\ \"⚖️ Bonded/Staked", "100)) + \" %*\\n\\n\" \\ \"↩️ Bonding ROI: *\" +", "node in churned_in: text += f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text += \"\\nNodes", "from constants.messages import get_node_health_warning_message, get_node_healthy_again_message from handlers.chat_helpers import try_message_with_home_menu, try_message_to_all_users", "= remote_node['ip_address'] local_node['last_notification_timestamp'] = datetime.timestamp(datetime.now()) local_node['notification_timeout_in_seconds'] *= NOTIFICATION_TIMEOUT_MULTIPLIER try_message_with_home_menu(context=context, chat_id=chat_id,", "\\ f\"Your software version is *{node['version']}* \" \\ f\"but one", "immediately!' 
try_message_with_home_menu(context=context, chat_id=chat_id, text=text) else: if block_height_stuck_count >= 1: text", "Node is behind the latest block height and catching up!", "validator['node_address']] = validator['status'] return local_node_statuses = context.bot_data['node_statuses'] churned_in = []", "logger.error(\"I couldn't get the node accounts while checking version status.\")", "+= \"Nodes Added:\\n\" if len(churned_in) else \"\" for node in", "+= \"\\n\\n⚠️ 🚨 CHURNING BUT THE VAULT ADDRESSES DID NOT", "in current_chains: if chain['chain'] in context.bot_data['vault_addresses']: if chain['address'] != context.bot_data['vault_addresses'][chain['chain']]:", "or len(churned_out): text = \"🔄 CHURN SUMMARY\\n\" \\ \"THORChain has", "is_thornode_healthy(context, node_address): check_thorchain_block_height(context, node_address=node_address) check_thorchain_catch_up_status(context, node_address=node_address) check_thorchain_midgard_api(context, node_address=node_address) for node_address", "NOT CHANGE 🚨\\n\" context.bot_data['vault_addresses'][chain['chain']] = chain['address'] except Exception as e:", "logger.error(\"I couldn't get the node accounts while checking if churning", "= local_node_statuses[ validator['node_address']] if validator[ 'node_address'] in local_node_statuses else \"unknown\"", "get_slash_points_threshold(context) slash_point_change = abs(int(local_node['slash_points']) - int(remote_node['slash_points'])) if (len(changed_fields) <= 1)", "\\ \"↩️ Bonding ROI: *\" + '{:.2f}'.format( float(network['bondingAPY']) * 100)", "local_status == 'active' and remote_status == 'standby')): return True return", "block_height <= node_data.setdefault('block_height', 0) block_height_stuck_count = node_data.setdefault(\"block_height_stuck_count\", 0) if is_stuck:", "is not active anymore! 
💀' + '\\n' + \\ 'Address:", "Connection error with {node_data['ip_address']}\") return if node_data['is_catching_up'] != is_currently_catching_up: try:", "chain['address'] != context.bot_data['vault_addresses'][chain['chain']]: text += f\"\\n\\n🔐 Vault Addresses:\" if \"Vault", "\\ 'Please check your Thornode immediately!' try_message_with_home_menu(context, chat_id=chat_id, text=text) node_data['is_midgard_healthy']", "try_message_with_home_menu(context=context, chat_id=chat_id, text=text) else: if block_height_stuck_count >= 1: text =", "= True return True except (Timeout, ConnectionError, BadStatusException, Exception): if", "context.job.context['chat_data']['nodes'][node_address] if 'is_catching_up' not in node_data: node_data['is_catching_up'] = False try:", "insolvency_count >= MISSING_FUNDS_THRESHOLD: message = 'THORChain is *100% solvent* again!", "node_data.setdefault('block_height', 0) block_height_stuck_count = node_data.setdefault(\"block_height_stuck_count\", 0) if is_stuck: block_height_stuck_count +=", "BUT THE VAULT ADDRESSES DID NOT CHANGE 🚨\\n\" context.bot_data['vault_addresses'][chain['chain']] =", "True except (Timeout, ConnectionError, BadStatusException, Exception): if was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id,", "for validator in validators: if did_churn_happen(validator, local_node_statuses, highest_churn_status_since): highest_churn_status_since =", "+ \\ 'Please check your Thornode immediately!' try_message_with_home_menu(context=context, chat_id=chat_id, text=text)", "'Please check your Thornode immediately!' 
try_message_with_home_menu(context=context, chat_id=chat_id, text=text) else: if", "\"\" for node in churned_in: text += f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text", "' + '{:,}'.format(int(remote_node['slash_points'])) return text else: return None def check_versions_status(context):", "+ \" %* APY\\n\\n\" \\ \"↩️ Liquidity ROI: *\" +", "local_node['status'].upper() in MONITORED_STATUSES and is_thornode_healthy(context, node_address): check_thorchain_block_height(context, node_address=node_address) check_thorchain_catch_up_status(context, node_address=node_address)", "key=lambda v: version.parse(v)) last_newest_version = chat_data.get('newest_software_version', None) if last_newest_version is", "node answers. If it doesn't we get an Exception. get_latest_block_height(node_data['ip_address'])", "!= remote_node[field] ] threshold = get_slash_points_threshold(context) slash_point_change = abs(int(local_node['slash_points']) -", "if (len(changed_fields) <= 1) and ('slash_points' in changed_fields) and (slash_point_change", "network = get_network_data() text += f\"📡 Network Security: *{network_security_ratio_to_string(get_network_security_ratio(network))}*\\n\\n\" \\", "else: if block_height_stuck_count >= 1: text = f\"Block height is", "get_insolvent_balances_message(asgard_solvency, yggdrasil_solvency) else: if insolvency_count >= MISSING_FUNDS_THRESHOLD: message = 'THORChain", "return highest_version = max(map(lambda n: n['version'], node_accounts), key=lambda v: version.parse(v))", "{block_height}\\n\" try_message_with_home_menu(context=context, chat_id=chat_id, text=text) block_height_stuck_count = 0 node_data['block_height'] = block_height", "get_pool_addresses_from_any_node() for chain in current_chains: if chain['chain'] in context.bot_data['vault_addresses']: if", "= insolvency_count return message def check_thorchain_catch_up_status(context, node_address): \"\"\" Check if", ">= MISSING_FUNDS_THRESHOLD: message = 'THORChain is *100% 
solvent* again! 👌\\n'", "try_message_to_all_users(context, text=text) for validator in validators: context.bot_data['node_statuses'][ validator['node_address']] = validator['status']", "['status', 'bond', 'slash_points'] if local_node[field] != remote_node[field] ] threshold =", "'THORNode: ' + node_data['alias'] + '\\n' + \\ 'Node address:", "def check_thorchain_block_height(context, node_address): chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] try:", "\"\\n\\n⚠️ 🚨 CHURNING BUT THE VAULT ADDRESSES DID NOT CHANGE", "f\"THORNode: {node_data['alias']}\\n\" + \\ f\"Node address: {node_address}\\n\" + \\ f\"Block", "if was_healthy != is_midgard_healthy: if is_midgard_healthy: text = 'Midgard API", "continue if remote_node is None: text = 'THORNode ' +", "= f\"Consider updating the software on your node: *{node['alias']}* ‼️\\n\"", "is *{node['version']}* \" \\ f\"but one of the nodes already", "len(churned_in) else \"\" for node in churned_in: text += f\"*{node['address']}*\\nBond:", "\"Vault Addresses\" not in text else \"\" text += f\"\\n*{chain['chain']}*:", "Vault address: {chain['address']}\\n\" else: text += \"\\n\\n⚠️ 🚨 CHURNING BUT", "bool: remote_status = validator['status'] local_status = local_node_statuses[validator['node_address']] if validator[ 'node_address']", "+ \\ f\"THORNode: {node_data['alias']}\\n\" + \\ f\"Node address: {node_address}\\n\" +", "except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error while querying Asgard", "= is_thorchain_catching_up( node_data['ip_address']) except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error", "(Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error with {node_data['ip_address']}\") return if", "else: text = 'Midgard API is not healthy anymore! 💀'", "solvent* again! 
👌\\n' insolvency_count = 0 context.bot_data[\"insolvency_count\"] = insolvency_count return", "e: logger.exception(e) logger.error(\"I couldn't get the node accounts while checking", "+ \\ 'Please check your Thornode immediately!' try_message_with_home_menu(context, chat_id=chat_id, text=text)", "assuming node was healhty. if \"healthy\" not in context.job.context['chat_data']['nodes'][node_address]: context.job.context['chat_data']['nodes'][node_address][\"healthy\"]", "local_node['alias'] + ' is not active anymore! 💀' + '\\n'", "def check_thornodes(context): chat_id = context.job.context['chat_id'] chat_data = context.job.context['chat_data'] inactive_nodes =", "Thornode immediately!' try_message_with_home_menu(context=context, chat_id=chat_id, text=text) else: if block_height_stuck_count >= 1:", "if is_currently_catching_up: node_data['is_catching_up'] = True text = 'The Node is", "= max(map(lambda n: n['version'], node_accounts), key=lambda v: version.parse(v)) last_newest_version =", "check your Thornode immediately!' 
try_message_with_home_menu(context, chat_id=chat_id, text=text) node_data['is_midgard_healthy'] = is_midgard_healthy", "packaging import version from service.utils import * def check_thornodes(context): chat_id", "validator['status'] return local_node_statuses = context.bot_data['node_statuses'] churned_in = [] churned_out =", "= remote_node['status'] local_node['bond'] = remote_node['bond'] local_node['slash_points'] = remote_node['slash_points'] local_node['ip_address'] =", "'\\n' + \\ 'Block height stuck at: ' + block_height", "software version is *{node['version']}* \" \\ f\"but one of the", "from handlers.chat_helpers import try_message_with_home_menu, try_message_to_all_users from packaging import version from", "context.bot_data[\"insolvency_count\"] = insolvency_count return message def check_thorchain_catch_up_status(context, node_address): \"\"\" Check", "as e: logger.exception(e) logger.error(\"I couldn't get the node accounts while", "chat_id=chat_id, text=message) else: local_node['notification_timeout_in_seconds'] = INITIAL_NOTIFICATION_TIMEOUT if local_node['status'].upper() in MONITORED_STATUSES", "def check_churning(context): try: validators = get_node_accounts() except Exception as e:", "local_node_statuses else \"unknown\" if remote_status != local_status: if 'active' ==", "( local_status == 'active' and remote_status == 'standby')): return True", "is None or version.parse( highest_version) > version.parse(last_newest_version): chat_data['newest_software_version'] = highest_version", "\"Nodes Added:\\n\" if len(churned_in) else \"\" for node in churned_in:", "get_network_data() text += f\"📡 Network Security: *{network_security_ratio_to_string(get_network_security_ratio(network))}*\\n\\n\" \\ f\"💚 Total", "local_node[field] != remote_node[field] ] threshold = get_slash_points_threshold(context) slash_point_change = abs(int(local_node['slash_points'])", "for field in ['status', 'bond', 'slash_points'] if local_node[field] != remote_node[field]", "- 
int(remote_node['slash_points'])) if (len(changed_fields) <= 1) and ('slash_points' in changed_fields)", "True return False def is_thornode_healthy(context, node_address) -> bool: chat_id =", "while checking version status.\") return highest_version = max(map(lambda n: n['version'],", "= chat_data.get('newest_software_version', None) if last_newest_version is None or version.parse( highest_version)", "text += f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text += \"\\nSystem:\\n\" try: network =", "asgard_solvency['is_solvent'] and yggdrasil_solvency['is_solvent'] insolvency_count = context.bot_data.setdefault(\"insolvency_count\", 0) message = None", "if insolvency_count >= MISSING_FUNDS_THRESHOLD: message = 'THORChain is *100% solvent*", "chain['address'] except Exception as e: logger.exception(e) try_message_to_all_users(context, text=text) for validator", "timedelta(seconds=local_node['notification_timeout_in_seconds'])) if is_not_blocked: message = build_notification_message_for_active_node(local_node, remote_node, context) if message:", "👌' + '\\n' + \\ 'IP: ' + node_data['ip_address'] +", "-> bool: chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] # If", "for validator in validators: remote_status = validator['status'] local_status = local_node_statuses[", "in MONITORED_STATUSES and is_thornode_healthy(context, node_address): check_thorchain_block_height(context, node_address=node_address) check_thorchain_catch_up_status(context, node_address=node_address) check_thorchain_midgard_api(context,", "'Node address: ' + node_address + '\\n' + \\ 'Block", "Thornode immediately!' 
else: node_data['is_catching_up'] = False text = 'The node", "has successfully churned:\\n\\n\" text += \"Nodes Added:\\n\" if len(churned_in) else", "API is ok \"\"\" chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address]", "True text = 'The Node is behind the latest block", "was_healthy != is_midgard_healthy: if is_midgard_healthy: text = 'Midgard API is", "import get_node_health_warning_message, get_node_healthy_again_message from handlers.chat_helpers import try_message_with_home_menu, try_message_to_all_users from packaging", "= [ field for field in ['status', 'bond', 'slash_points'] if", "Connection error while querying Asgard and Yggdrasil.\") return None except", "software on your node: *{node['alias']}* ‼️\\n\" \\ f\"Your software version", "\\ datetime.timestamp( datetime.now() - timedelta(seconds=local_node['notification_timeout_in_seconds'])) if is_not_blocked: message = build_notification_message_for_active_node(local_node,", "\" \\ f\"but one of the nodes already runs on", "NOTIFICATION_TIMEOUT_MULTIPLIER try_message_with_home_menu(context=context, chat_id=chat_id, text=message) else: local_node['notification_timeout_in_seconds'] = INITIAL_NOTIFICATION_TIMEOUT if local_node['status'].upper()", "float(network['liquidityAPY']) * 100) + \" %* APY\" context.bot_data.setdefault(\"vault_addresses\", {}) current_chains", "insolvency_count = context.bot_data.setdefault(\"insolvency_count\", 0) message = None if not is_solvent:", "catch up status \"\"\" chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address]", "text += f\" ➡️ {tor_to_rune(int(remote_node['bond']))}\" text += '\\nSlash Points: '", "insolvency_count = 0 context.bot_data[\"insolvency_count\"] = insolvency_count return message def check_thorchain_catch_up_status(context,", "= context.job.context['chat_id'] chat_data = context.job.context['chat_data'] inactive_nodes = [] for node_address,", 
"'\\n' + \\ 'Node address: ' + node_address try_message_with_home_menu(context, chat_id=chat_id,", "highest_version = max(map(lambda n: n['version'], node_accounts), key=lambda v: version.parse(v)) last_newest_version", "behind the latest block height and catching up! 💀 '", "if is_not_blocked: message = build_notification_message_for_active_node(local_node, remote_node, context) if message: #", "\\ f\"IP: {node_data['ip_address']}\\n\" + \\ f\"THORNode: {node_data['alias']}\\n\" + \\ f\"Node", "if 'status' in changed_fields: text += f' ➡️ {remote_node[\"status\"].capitalize()}' text", "' ➡️ ' + '{:,}'.format(int(remote_node['slash_points'])) return text else: return None", "checking if churning occurred.\") return if 'node_statuses' not in context.bot_data:", "context.bot_data['node_statuses'][ validator['node_address']] = validator['status'] def did_churn_happen(validator, local_node_statuses, highest_churn_status_since) -> bool:", "your Thornode immediately!' else: node_data['is_catching_up'] = False text = 'The", "+ node_address try_message_with_home_menu(context, chat_id=chat_id, text=text) else: text = 'Midgard API", "100) + \" %* APY\\n\\n\" \\ \"↩️ Liquidity ROI: *\"", "field in ['status', 'bond', 'slash_points'] if local_node[field] != remote_node[field] ]", "+ node_data['ip_address'] + '\\n' + \\ 'THORNode: ' + node_data['alias']", "current_chains: if chain['chain'] in context.bot_data['vault_addresses']: if chain['address'] != context.bot_data['vault_addresses'][chain['chain']]: text", "inactive_nodes = [] for node_address, local_node in chat_data.get('nodes', {}).items(): try:", "highest_churn_status_since): highest_churn_status_since = int(validator['status_since']) for validator in validators: remote_status =", "changed_fields: text += ' ➡️ ' + '{:,}'.format(int(remote_node['slash_points'])) return text", "and (slash_point_change <= threshold): return None if len(changed_fields) > 0:", "context.bot_data['vault_addresses'][chain['chain']]: text += 
f\"\\n\\n🔐 Vault Addresses:\" if \"Vault Addresses\" not", "= chain['address'] except Exception as e: logger.exception(e) try_message_to_all_users(context, text=text) for", "is None: text = 'THORNode ' + local_node['alias'] + '", "= get_pool_addresses_from_any_node() for chain in current_chains: if chain['chain'] in context.bot_data['vault_addresses']:", "f\" ➡️ {tor_to_rune(int(remote_node['bond']))}\" text += '\\nSlash Points: ' + '{:,}'.format(int(local_node['slash_points']))", "text = 'The Node is behind the latest block height", "return False def check_thorchain_block_height(context, node_address): chat_id = context.job.context['chat_id'] node_data =", "'slash_points'] if local_node[field] != remote_node[field] ] threshold = get_slash_points_threshold(context) slash_point_change", "at: {block_height}\\n\" try_message_with_home_menu(context=context, chat_id=chat_id, text=text) block_height_stuck_count = 0 node_data['block_height'] =", "return True except (Timeout, ConnectionError, BadStatusException, Exception): if was_healthy: try_message_with_home_menu(context=context,", "'{:.2f}'.format( float(network['liquidityAPY']) * 100) + \" %* APY\" context.bot_data.setdefault(\"vault_addresses\", {})", "except Exception as e: logger.exception(e) logger.error(\"I couldn't get the node", "import try_message_with_home_menu, try_message_to_all_users from packaging import version from service.utils import", "context.bot_data: context.bot_data['node_statuses'] = {} for validator in validators: context.bot_data['node_statuses'][ validator['node_address']]", "anymore! 💀' + '\\n' + \\ 'IP: ' + node_data['ip_address']", "try_message_with_home_menu(context=context, chat_id=chat_id, text=text) def check_thorchain_midgard_api(context, node_address): \"\"\" Check that Midgard", "'Midgard API is not healthy anymore! 
💀' + '\\n' +", "chat_id=chat_id, text=text) continue is_not_blocked = float(local_node['last_notification_timestamp']) < \\ datetime.timestamp( datetime.now()", "data local_node['status'] = remote_node['status'] local_node['bond'] = remote_node['bond'] local_node['slash_points'] = remote_node['slash_points']", "f\"Your software version is *{node['version']}* \" \\ f\"but one of", "text += f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text += \"\\nNodes Removed:\\n\" if len(churned_out)", "Check whether node answers. If it doesn't we get an", "address: {chain['address']}\\n\" else: text += \"\\n\\n⚠️ 🚨 CHURNING BUT THE", "check_solvency(context) -> [str, None]: try: asgard_solvency = asgard_solvency_check() yggdrasil_solvency =", "Connection error with {node_data['ip_address']}\") return is_stuck = block_height <= node_data.setdefault('block_height',", "block height: ' + block_height try_message_with_home_menu(context=context, chat_id=chat_id, text=text) def check_thorchain_midgard_api(context,", "message: try_message_to_all_users(context, text=message) def check_solvency(context) -> [str, None]: try: asgard_solvency", "*\" + '{:.2f}'.format( int(get_network_security_ratio(network) * 100)) + \" %*\\n\\n\" \\", "APY\\n\\n\" \\ \"↩️ Liquidity ROI: *\" + '{:.2f}'.format( float(network['liquidityAPY']) *", "\\ 'Block height stuck at: ' + block_height + '\\n\\n'", "None def check_versions_status(context): chat_data = context.job.context['chat_data'] try: node_accounts = get_node_accounts()", "\" %* APY\\n\\n\" \\ \"↩️ Liquidity ROI: *\" + '{:.2f}'.format(", "= context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] was_healthy = node_data.setdefault('is_midgard_healthy', True) is_midgard_healthy", "get the node accounts while checking if churning occurred.\") return", "True was_healthy = node_data[\"healthy\"] try: # Check whether node answers.", "block_height = 
get_latest_block_height(node_data['ip_address']) except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error", "\\ 'IP: ' + node_data['ip_address'] + '\\n' + \\ 'THORNode:", "= 0 node_data['block_height'] = block_height node_data[\"block_height_stuck_count\"] = block_height_stuck_count def check_solvency_job(context):", "node in chat_data.get('nodes', {}).values(): if version.parse(node['version']) < version.parse(highest_version): message =", "API is healthy again! 👌' + '\\n' + \\ 'IP:", "False try: is_currently_catching_up = is_thorchain_catching_up( node_data['ip_address']) except (Timeout, ConnectionError): logger.warning(f\"Timeout", "node accounts while checking if churning occurred.\") return if 'node_statuses'", "text else: return None def check_versions_status(context): chat_data = context.job.context['chat_data'] try:", "height: ' + block_height + '\\n\\n' + \\ 'Please check", "continue is_not_blocked = float(local_node['last_notification_timestamp']) < \\ datetime.timestamp( datetime.now() - timedelta(seconds=local_node['notification_timeout_in_seconds']))", "text=text) continue is_not_blocked = float(local_node['last_notification_timestamp']) < \\ datetime.timestamp( datetime.now() -", "yggdrasil_solvency) else: if insolvency_count >= MISSING_FUNDS_THRESHOLD: message = 'THORChain is", "field for field in ['status', 'bond', 'slash_points'] if local_node[field] !=", "= 'THORChain is *100% solvent* again! 
👌\\n' insolvency_count = 0", "= [] churned_out = [] highest_churn_status_since = 0 for validator", "message: # Update data local_node['status'] = remote_node['status'] local_node['bond'] = remote_node['bond']", "node_data['ip_address'] + '\\n' + \\ 'THORNode: ' + node_data['alias'] +", "1) and ('slash_points' in changed_fields) and (slash_point_change <= threshold): return", "validators: remote_status = validator['status'] local_status = local_node_statuses[ validator['node_address']] if validator[", "for node in chat_data.get('nodes', {}).values(): if version.parse(node['version']) < version.parse(highest_version): message", "not in node_data: node_data['is_catching_up'] = False try: is_currently_catching_up = is_thorchain_catching_up(", "height: ' + block_height try_message_with_home_menu(context=context, chat_id=chat_id, text=text) def check_thorchain_midgard_api(context, node_address):", "\"🔄 CHURN SUMMARY\\n\" \\ \"THORChain has successfully churned:\\n\\n\" text +=", "in context.job.context['chat_data']['nodes'][node_address]: context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True was_healthy = node_data[\"healthy\"] try: #", "check_thornodes(context): chat_id = context.job.context['chat_id'] chat_data = context.job.context['chat_data'] inactive_nodes = []", "' + node_address + '\\n\\n' + \\ 'Please enter another", "while checking if churning occurred.\") return if 'node_statuses' not in", "return if 'node_statuses' not in context.bot_data: context.bot_data['node_statuses'] = {} for", "error with {node_data['ip_address']}\") return is_stuck = block_height <= node_data.setdefault('block_height', 0)", "answers. If it doesn't we get an Exception. 
get_latest_block_height(node_data['ip_address']) if", "= 'THORNode ' + local_node['alias'] + ' is not active", "inactive_nodes.append(node_address) try_message_with_home_menu(context=context, chat_id=chat_id, text=text) continue is_not_blocked = float(local_node['last_notification_timestamp']) < \\", "v: version.parse(v)) last_newest_version = chat_data.get('newest_software_version', None) if last_newest_version is None", "try_message_with_home_menu(context, chat_id=chat_id, text=text) else: text = 'Midgard API is not", "+= \"\\nSystem:\\n\" try: network = get_network_data() text += f\"📡 Network", "block_height_stuck_count == 1: text = 'Block height is not increasing", "as e: logger.exception(e) return None is_solvent = asgard_solvency['is_solvent'] and yggdrasil_solvency['is_solvent']", "context.job.context['chat_data']['nodes'][node_address] was_healthy = node_data.setdefault('is_midgard_healthy', True) is_midgard_healthy = is_midgard_api_healthy(node_data['ip_address']) if was_healthy", "Added:\\n\" if len(churned_in) else \"\" for node in churned_in: text", "try_message_to_all_users(context, text=message) def check_solvency(context) -> [str, None]: try: asgard_solvency =", "chat_data.get('nodes', {}).items(): try: remote_node = get_thornode_object_or_none(address=node_address) except HTTPError as e:", "validator in validators: if did_churn_happen(validator, local_node_statuses, highest_churn_status_since): highest_churn_status_since = int(validator['status_since'])", "highest_churn_status_since) -> bool: remote_status = validator['status'] local_status = local_node_statuses[validator['node_address']] if", "\"\"\" Check that Midgard API is ok \"\"\" chat_id =", "insolvency_count == MISSING_FUNDS_THRESHOLD: message = 'THORChain is *missing funds*! 
💀\\n\\n'", "from packaging import version from service.utils import * def check_thornodes(context):", "= validator['status'] return local_node_statuses = context.bot_data['node_statuses'] churned_in = [] churned_out", "*{tor_to_rune(node['bond'])}*\\n\" text += \"\\nSystem:\\n\" try: network = get_network_data() text +=", "== 1: text = 'Block height is not increasing anymore!", "('slash_points' in changed_fields) and (slash_point_change <= threshold): return None if", "* 100)) + \" %*\\n\\n\" \\ \"↩️ Bonding ROI: *\"", "ROI: *\" + '{:.2f}'.format( float(network['bondingAPY']) * 100) + \" %*", "local_node_statuses, highest_churn_status_since) -> bool: remote_status = validator['status'] local_status = local_node_statuses[validator['node_address']]", "'ready' and remote_status == 'active') or ( local_status == 'active'", "False return False def check_thorchain_block_height(context, node_address): chat_id = context.job.context['chat_id'] node_data", "to the latest block height again! 👌' + '\\n' +", "THORNode address.' 
inactive_nodes.append(node_address) try_message_with_home_menu(context=context, chat_id=chat_id, text=text) continue is_not_blocked = float(local_node['last_notification_timestamp'])", "text=get_node_healthy_again_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True return True except (Timeout, ConnectionError, BadStatusException,", "(total)\\n\\n\" \\ \"⚖️ Bonded/Staked Ratio: *\" + '{:.2f}'.format( int(get_network_security_ratio(network) *", "= 'The node caught up to the latest block height", "= context.job.context['chat_data']['nodes'][node_address] try: block_height = get_latest_block_height(node_data['ip_address']) except (Timeout, ConnectionError): logger.warning(f\"Timeout", "node_address, local_node in chat_data.get('nodes', {}).items(): try: remote_node = get_thornode_object_or_none(address=node_address) except", "churning occurred.\") return if 'node_statuses' not in context.bot_data: context.bot_data['node_statuses'] =", "= get_network_data() text += f\"📡 Network Security: *{network_security_ratio_to_string(get_network_security_ratio(network))}*\\n\\n\" \\ f\"💚", "0 for validator in validators: if did_churn_happen(validator, local_node_statuses, highest_churn_status_since): highest_churn_status_since", "+= f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text += \"\\nSystem:\\n\" try: network = get_network_data()", "check your Thornode immediately!' 
try_message_with_home_menu(context=context, chat_id=chat_id, text=text) else: if block_height_stuck_count", "\"bond\": validator['bond'] }) elif 'active' == local_status: churned_out.append({ \"address\": validator['node_address'],", "'{:.2f}'.format( int(get_network_security_ratio(network) * 100)) + \" %*\\n\\n\" \\ \"↩️ Bonding", "CHANGE 🚨\\n\" context.bot_data['vault_addresses'][chain['chain']] = chain['address'] except Exception as e: logger.exception(e)", "and remote_status == 'standby')): return True return False def is_thornode_healthy(context,", "return message def check_thorchain_catch_up_status(context, node_address): \"\"\" Check if node is", "ConnectionError, BadStatusException, Exception): if was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_health_warning_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] =", "context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = False return False def check_thorchain_block_height(context, node_address): chat_id =", "\" %*\\n\\n\" \\ \"↩️ Bonding ROI: *\" + '{:.2f}'.format( float(network['bondingAPY'])", "if 'bond' in changed_fields: text += f\" ➡️ {tor_to_rune(int(remote_node['bond']))}\" text", "!= local_status: if 'active' == remote_status: churned_in.append({ \"address\": validator['node_address'], \"bond\":", "block height again! 
👌' + '\\n' + \\ 'IP: '", "return None if len(changed_fields) > 0: text = f\"THORNode: {local_node['alias']}\\n\"", "except HTTPError as e: logger.exception(e) continue if remote_node is None:", "len(changed_fields) > 0: text = f\"THORNode: {local_node['alias']}\\n\" \\ f\"Address: {local_node['node_address']}\\n\"", "\\ 'Node address: ' + node_address + '\\n\\n' + \\", "is_not_blocked = float(local_node['last_notification_timestamp']) < \\ datetime.timestamp( datetime.now() - timedelta(seconds=local_node['notification_timeout_in_seconds'])) if", "node_address): \"\"\" Check that Midgard API is ok \"\"\" chat_id", "remote_status: churned_in.append({ \"address\": validator['node_address'], \"bond\": validator['bond'] }) elif 'active' ==", "return if node_data['is_catching_up'] != is_currently_catching_up: try: block_height = get_latest_block_height(node_data['ip_address']) except", "context.bot_data['vault_addresses']: if chain['address'] != context.bot_data['vault_addresses'][chain['chain']]: text += f\"\\n\\n🔐 Vault Addresses:\"", "context.job.context['chat_data'] try: node_accounts = get_node_accounts() except Exception as e: logger.exception(e)", "node_address + '\\n\\n' + \\ 'Please check your Thornode immediately!'", "{node_data['alias']}\\n\" + \\ f\"Node address: {node_address}\\n\" + \\ f\"Block height", "Points: ' + '{:,}'.format(int(local_node['slash_points'])) if 'slash_points' in changed_fields: text +=", "+= f\" ➡️ {tor_to_rune(int(remote_node['bond']))}\" text += '\\nSlash Points: ' +", "and Yggdrasil.\") return None except Exception as e: logger.exception(e) return", "if 'is_catching_up' not in node_data: node_data['is_catching_up'] = False try: is_currently_catching_up", "'\\n' + \\ 'Current block height: ' + block_height +", "in validators: context.bot_data['node_statuses'][ validator['node_address']] = validator['status'] return local_node_statuses = context.bot_data['node_statuses']", "if 'node_statuses' not in context.bot_data: 
context.bot_data['node_statuses'] = {} for validator", "else: if insolvency_count >= MISSING_FUNDS_THRESHOLD: message = 'THORChain is *100%", "is_currently_catching_up: try: block_height = get_latest_block_height(node_data['ip_address']) except (Timeout, ConnectionError): logger.warning(f\"Timeout or", "Vault Addresses:\" if \"Vault Addresses\" not in text else \"\"", "'active' == local_status: churned_out.append({ \"address\": validator['node_address'], \"bond\": validator['bond'] }) if", "is *missing funds*! 💀\\n\\n' message += get_insolvent_balances_message(asgard_solvency, yggdrasil_solvency) else: if", "or version.parse( highest_version) > version.parse(last_newest_version): chat_data['newest_software_version'] = highest_version for node", "# Check whether node answers. If it doesn't we get", "text += f\"\\nBond: {tor_to_rune(int(local_node['bond']))}\" if 'bond' in changed_fields: text +=", "local_status = local_node_statuses[ validator['node_address']] if validator[ 'node_address'] in local_node_statuses else", "up to the latest block height again! 👌' + '\\n'", "Removed:\\n\" if len(churned_out) else \"\" for node in churned_out: text", "(Timeout, ConnectionError, BadStatusException, Exception): if was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_health_warning_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"]", "asgard_solvency_check() yggdrasil_solvency = yggdrasil_solvency_check() except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection", "= \"currently unavailable\" if is_currently_catching_up: node_data['is_catching_up'] = True text =", "+ \\ 'THORNode: ' + node_data['alias'] + '\\n' + \\", "not in text else \"\" text += f\"\\n*{chain['chain']}*: \\n\" \\", "try: # Check whether node answers. If it doesn't we", "\\ f\"but one of the nodes already runs on *{highest_version}*\"", "whether node answers. 
If it doesn't we get an Exception.", "remote_status = validator['status'] local_status = local_node_statuses[validator['node_address']] if validator[ 'node_address'] in", "= get_thornode_object_or_none(address=node_address) except HTTPError as e: logger.exception(e) continue if remote_node", "the nodes already runs on *{highest_version}*\" try_message_with_home_menu( context, chat_id=context.job.context['chat_id'], text=message)", "= float(local_node['last_notification_timestamp']) < \\ datetime.timestamp( datetime.now() - timedelta(seconds=local_node['notification_timeout_in_seconds'])) if is_not_blocked:", "def check_solvency(context) -> [str, None]: try: asgard_solvency = asgard_solvency_check() yggdrasil_solvency", "ConnectionError): logger.warning(f\"Timeout or Connection error with {node_data['ip_address']}\") return if node_data['is_catching_up']", "latest block height again! 👌' + '\\n' + \\ 'IP:", "with {node_data['ip_address']}\") block_height = \"currently unavailable\" if is_currently_catching_up: node_data['is_catching_up'] =", "validator['bond'] }) if len(churned_in) or len(churned_out): text = \"🔄 CHURN", "check_solvency(context) if message: try_message_to_all_users(context, text=message) def check_solvency(context) -> [str, None]:", "initialized assuming node was healhty. 
if \"healthy\" not in context.job.context['chat_data']['nodes'][node_address]:", "{node_data['ip_address']}\") return if node_data['is_catching_up'] != is_currently_catching_up: try: block_height = get_latest_block_height(node_data['ip_address'])", "Exception as e: logger.exception(e) logger.error(\"I couldn't get the node accounts", "current_chains = get_pool_addresses_from_any_node() for chain in current_chains: if chain['chain'] in", "{tor_to_rune(int(local_node['bond']))}\" if 'bond' in changed_fields: text += f\" ➡️ {tor_to_rune(int(remote_node['bond']))}\"", "[str, None]: changed_fields = [ field for field in ['status',", "is_currently_catching_up: node_data['is_catching_up'] = True text = 'The Node is behind", "chat_data['nodes'][node_address] def build_notification_message_for_active_node(local_node, remote_node, context) -> [str, None]: changed_fields =", "chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] try: block_height = get_latest_block_height(node_data['ip_address'])", "address: ' + node_address try_message_with_home_menu(context, chat_id=chat_id, text=text) else: text =", "'{:,}'.format(int(local_node['slash_points'])) if 'slash_points' in changed_fields: text += ' ➡️ '", "message = check_solvency(context) if message: try_message_to_all_users(context, text=message) def check_solvency(context) ->", "= asgard_solvency_check() yggdrasil_solvency = yggdrasil_solvency_check() except (Timeout, ConnectionError): logger.warning(f\"Timeout or", "f\"💚 Total Active Bond: *{tor_to_rune(network['bondMetrics']['totalActiveBond'])}* (total)\\n\\n\" \\ \"⚖️ Bonded/Staked Ratio:", "{remote_node[\"status\"].capitalize()}' text += f\"\\nBond: {tor_to_rune(int(local_node['bond']))}\" if 'bond' in changed_fields: text", "height again! 
👌' + '\\n' + \\ 'IP: ' +", "{}).values(): if version.parse(node['version']) < version.parse(highest_version): message = f\"Consider updating the", "\"\"\" chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] if 'is_catching_up' not", "'Current block height: ' + block_height try_message_with_home_menu(context=context, chat_id=chat_id, text=text) def", "not active anymore! 💀' + '\\n' + \\ 'Address: '", "in changed_fields) and (slash_point_change <= threshold): return None if len(changed_fields)", "get_node_accounts() except Exception as e: logger.exception(e) logger.error(\"I couldn't get the", "CHURNING BUT THE VAULT ADDRESSES DID NOT CHANGE 🚨\\n\" context.bot_data['vault_addresses'][chain['chain']]", "*missing funds*! 💀\\n\\n' message += get_insolvent_balances_message(asgard_solvency, yggdrasil_solvency) else: if insolvency_count", "'Node address: ' + node_address + '\\n' + \\ 'Current", "' + local_node['alias'] + ' is not active anymore! 💀'", "f\"Block height is increasing again! 👌\\n\" + \\ f\"IP: {node_data['ip_address']}\\n\"", "asgard_solvency = asgard_solvency_check() yggdrasil_solvency = yggdrasil_solvency_check() except (Timeout, ConnectionError): logger.warning(f\"Timeout", "block height: ' + block_height + '\\n\\n' + \\ 'Please", "chat_data = context.job.context['chat_data'] inactive_nodes = [] for node_address, local_node in", "validator in validators: remote_status = validator['status'] local_status = local_node_statuses[ validator['node_address']]", "0 node_data['block_height'] = block_height node_data[\"block_height_stuck_count\"] = block_height_stuck_count def check_solvency_job(context): message", "\\ 'Please check your Thornode immediately!' try_message_with_home_menu(context=context, chat_id=chat_id, text=text) else:", "node_address): \"\"\" Check if node is some blocks behind with", "Exception. 
get_latest_block_height(node_data['ip_address']) if not was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_healthy_again_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] =", "text = f\"THORNode: {local_node['alias']}\\n\" \\ f\"Address: {local_node['node_address']}\\n\" \\ f\"Status: {local_node['status'].capitalize()}\"", "node caught up to the latest block height again! 👌'", "+ \\ f\"Block height now at: {block_height}\\n\" try_message_with_home_menu(context=context, chat_id=chat_id, text=text)", "text += f\"\\n\\n🔐 Vault Addresses:\" if \"Vault Addresses\" not in", "node_data.setdefault(\"block_height_stuck_count\", 0) if is_stuck: block_height_stuck_count += 1 if block_height_stuck_count ==", "yggdrasil_solvency_check() except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error while querying", "node_data = context.job.context['chat_data']['nodes'][node_address] was_healthy = node_data.setdefault('is_midgard_healthy', True) is_midgard_healthy = is_midgard_api_healthy(node_data['ip_address'])", "logger.exception(e) logger.error(\"I couldn't get the node accounts while checking version", "check_churning(context): try: validators = get_node_accounts() except Exception as e: logger.exception(e)", "return None is_solvent = asgard_solvency['is_solvent'] and yggdrasil_solvency['is_solvent'] insolvency_count = context.bot_data.setdefault(\"insolvency_count\",", "changed_fields = [ field for field in ['status', 'bond', 'slash_points']", "get the node accounts while checking version status.\") return highest_version", "context.job.context['chat_id'] chat_data = context.job.context['chat_data'] inactive_nodes = [] for node_address, local_node", "return None except Exception as e: logger.exception(e) return None is_solvent", "return local_node_statuses = context.bot_data['node_statuses'] churned_in = [] churned_out = []", "= get_node_accounts() except Exception as e: 
logger.exception(e) logger.error(\"I couldn't get", "if chain['address'] != context.bot_data['vault_addresses'][chain['chain']]: text += f\"\\n\\n🔐 Vault Addresses:\" if", "text += \"Nodes Added:\\n\" if len(churned_in) else \"\" for node", "< \\ datetime.timestamp( datetime.now() - timedelta(seconds=local_node['notification_timeout_in_seconds'])) if is_not_blocked: message =", "in validators: context.bot_data['node_statuses'][ validator['node_address']] = validator['status'] def did_churn_happen(validator, local_node_statuses, highest_churn_status_since)", "\"↩️ Bonding ROI: *\" + '{:.2f}'.format( float(network['bondingAPY']) * 100) +", "➡️ {tor_to_rune(int(remote_node['bond']))}\" text += '\\nSlash Points: ' + '{:,}'.format(int(local_node['slash_points'])) if", "remote_node['bond'] local_node['slash_points'] = remote_node['slash_points'] local_node['ip_address'] = remote_node['ip_address'] local_node['last_notification_timestamp'] = datetime.timestamp(datetime.now())", "logger.exception(e) logger.error(\"I couldn't get the node accounts while checking if", "\\ \"↩️ Liquidity ROI: *\" + '{:.2f}'.format( float(network['liquidityAPY']) * 100)", "Check if node is some blocks behind with catch up", "Midgard API is ok \"\"\" chat_id = context.job.context['chat_id'] node_data =", "text=get_node_health_warning_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = False return False def check_thorchain_block_height(context, node_address): chat_id", "check your Thornode immediately!' 
else: node_data['is_catching_up'] = False text =", "status.\") return highest_version = max(map(lambda n: n['version'], node_accounts), key=lambda v:", "APY\" context.bot_data.setdefault(\"vault_addresses\", {}) current_chains = get_pool_addresses_from_any_node() for chain in current_chains:", "‼️\\n\" \\ f\"Your software version is *{node['version']}* \" \\ f\"but", "= validator['status'] def did_churn_happen(validator, local_node_statuses, highest_churn_status_since) -> bool: remote_status =", "in local_node_statuses else \"unknown\" if int(validator['status_since']) > highest_churn_status_since and \\", "not initialized assuming node was healhty. if \"healthy\" not in", "'\\n' + \\ 'Node address: ' + node_address + '\\n\\n'", "+ \\ 'Node address: ' + node_address + '\\n\\n' +", "node_address=node_address) check_thorchain_midgard_api(context, node_address=node_address) for node_address in inactive_nodes: del chat_data['nodes'][node_address] def", "try: network = get_network_data() text += f\"📡 Network Security: *{network_security_ratio_to_string(get_network_security_ratio(network))}*\\n\\n\"", "= asgard_solvency['is_solvent'] and yggdrasil_solvency['is_solvent'] insolvency_count = context.bot_data.setdefault(\"insolvency_count\", 0) message =", "status \"\"\" chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] if 'is_catching_up'", "local_node in chat_data.get('nodes', {}).items(): try: remote_node = get_thornode_object_or_none(address=node_address) except HTTPError", "text = 'THORNode ' + local_node['alias'] + ' is not", "*= NOTIFICATION_TIMEOUT_MULTIPLIER try_message_with_home_menu(context=context, chat_id=chat_id, text=message) else: local_node['notification_timeout_in_seconds'] = INITIAL_NOTIFICATION_TIMEOUT if", "check_thorchain_block_height(context, node_address=node_address) check_thorchain_catch_up_status(context, node_address=node_address) check_thorchain_midgard_api(context, 
node_address=node_address) for node_address in inactive_nodes:", "churned_in.append({ \"address\": validator['node_address'], \"bond\": validator['bond'] }) elif 'active' == local_status:", "node_data['alias'] + '\\n' + \\ 'Node address: ' + node_address", "chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] if 'is_catching_up' not in", "f\"Block height now at: {block_height}\\n\" try_message_with_home_menu(context=context, chat_id=chat_id, text=text) block_height_stuck_count =", "address.' inactive_nodes.append(node_address) try_message_with_home_menu(context=context, chat_id=chat_id, text=text) continue is_not_blocked = float(local_node['last_notification_timestamp']) <", "if version.parse(node['version']) < version.parse(highest_version): message = f\"Consider updating the software", "in churned_in: text += f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text += \"\\nNodes Removed:\\n\"", "True) is_midgard_healthy = is_midgard_api_healthy(node_data['ip_address']) if was_healthy != is_midgard_healthy: if is_midgard_healthy:", "again! 
👌\\n\" + \\ f\"IP: {node_data['ip_address']}\\n\" + \\ f\"THORNode: {node_data['alias']}\\n\"", "'active' and remote_status == 'standby')): return True return False def", "in churned_out: text += f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text += \"\\nSystem:\\n\" try:", "node: *{node['alias']}* ‼️\\n\" \\ f\"Your software version is *{node['version']}* \"", "= False return False def check_thorchain_block_height(context, node_address): chat_id = context.job.context['chat_id']", "+= 1 if block_height_stuck_count == 1: text = 'Block height", "] threshold = get_slash_points_threshold(context) slash_point_change = abs(int(local_node['slash_points']) - int(remote_node['slash_points'])) if", "*{node['version']}* \" \\ f\"but one of the nodes already runs", "except Exception as e: logger.exception(e) return None is_solvent = asgard_solvency['is_solvent']", "int(get_network_security_ratio(network) * 100)) + \" %*\\n\\n\" \\ \"↩️ Bonding ROI:", "slash_point_change = abs(int(local_node['slash_points']) - int(remote_node['slash_points'])) if (len(changed_fields) <= 1) and", "f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text += \"\\nNodes Removed:\\n\" if len(churned_out) else \"\"", "check_thorchain_catch_up_status(context, node_address): \"\"\" Check if node is some blocks behind", "[] for node_address, local_node in chat_data.get('nodes', {}).items(): try: remote_node =", "'standby')): return True return False def is_thornode_healthy(context, node_address) -> bool:", "try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_health_warning_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = False return False def check_thorchain_block_height(context,", "+= f\"📡 Network Security: *{network_security_ratio_to_string(get_network_security_ratio(network))}*\\n\\n\" \\ f\"💚 Total Active Bond:", "else \"unknown\" if int(validator['status_since']) > highest_churn_status_since and \\ 
((local_status ==", "if int(validator['status_since']) > highest_churn_status_since and \\ ((local_status == 'ready' and", "\"healthy\" not in context.job.context['chat_data']['nodes'][node_address]: context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True was_healthy = node_data[\"healthy\"]", "+ '\\n' + \\ 'Node address: ' + node_address +", "if len(churned_in) or len(churned_out): text = \"🔄 CHURN SUMMARY\\n\" \\", "if node_data['is_catching_up'] != is_currently_catching_up: try: block_height = get_latest_block_height(node_data['ip_address']) except (Timeout,", "None or version.parse( highest_version) > version.parse(last_newest_version): chat_data['newest_software_version'] = highest_version for", "did_churn_happen(validator, local_node_statuses, highest_churn_status_since): highest_churn_status_since = int(validator['status_since']) for validator in validators:", "f\"Consider updating the software on your node: *{node['alias']}* ‼️\\n\" \\", "node in churned_out: text += f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text += \"\\nSystem:\\n\"", "validator['bond'] }) elif 'active' == local_status: churned_out.append({ \"address\": validator['node_address'], \"bond\":", "text=message) else: local_node['notification_timeout_in_seconds'] = INITIAL_NOTIFICATION_TIMEOUT if local_node['status'].upper() in MONITORED_STATUSES and", "error with {node_data['ip_address']}\") return if node_data['is_catching_up'] != is_currently_catching_up: try: block_height", "blocks behind with catch up status \"\"\" chat_id = context.job.context['chat_id']", "' is not active anymore! 
💀' + '\\n' + \\", "block_height_stuck_count = 0 node_data['block_height'] = block_height node_data[\"block_height_stuck_count\"] = block_height_stuck_count def", "node_data = context.job.context['chat_data']['nodes'][node_address] # If not initialized assuming node was", "\\ 'Address: ' + node_address + '\\n\\n' + \\ 'Please", "for node_address in inactive_nodes: del chat_data['nodes'][node_address] def build_notification_message_for_active_node(local_node, remote_node, context)", "\" %* APY\" context.bot_data.setdefault(\"vault_addresses\", {}) current_chains = get_pool_addresses_from_any_node() for chain", "your node: *{node['alias']}* ‼️\\n\" \\ f\"Your software version is *{node['version']}*", "the node accounts while checking if churning occurred.\") return if", "block_height_stuck_count def check_solvency_job(context): message = check_solvency(context) if message: try_message_to_all_users(context, text=message)", "0) message = None if not is_solvent: insolvency_count += 1", "version.parse(highest_version): message = f\"Consider updating the software on your node:", "accounts while checking if churning occurred.\") return if 'node_statuses' not", "if len(changed_fields) > 0: text = f\"THORNode: {local_node['alias']}\\n\" \\ f\"Address:", "from service.utils import * def check_thornodes(context): chat_id = context.job.context['chat_id'] chat_data", "validator[ 'node_address'] in local_node_statuses else \"unknown\" if int(validator['status_since']) > highest_churn_status_since", "highest_version) > version.parse(last_newest_version): chat_data['newest_software_version'] = highest_version for node in chat_data.get('nodes',", "\\ f\"Old Vault address: {context.bot_data['vault_addresses'][chain['chain']]}\\n\"\\ f\"⬇️\\n\" \\ f\"New Vault address:", "chat_data.get('newest_software_version', None) if last_newest_version is None or version.parse( highest_version) >", "chat_id=chat_id, text=text) else: text = 'Midgard API is not healthy", "node_data = 
context.job.context['chat_data']['nodes'][node_address] if 'is_catching_up' not in node_data: node_data['is_catching_up'] =", "' + node_address + '\\n\\n' + \\ 'Please check your", "*{network_security_ratio_to_string(get_network_security_ratio(network))}*\\n\\n\" \\ f\"💚 Total Active Bond: *{tor_to_rune(network['bondMetrics']['totalActiveBond'])}* (total)\\n\\n\" \\ \"⚖️", "Exception as e: logger.exception(e) try_message_to_all_users(context, text=text) for validator in validators:", "f\"New Vault address: {chain['address']}\\n\" else: text += \"\\n\\n⚠️ 🚨 CHURNING", "MONITORED_STATUSES and is_thornode_healthy(context, node_address): check_thorchain_block_height(context, node_address=node_address) check_thorchain_catch_up_status(context, node_address=node_address) check_thorchain_midgard_api(context, node_address=node_address)", "= None if not is_solvent: insolvency_count += 1 if insolvency_count", "churned_in = [] churned_out = [] highest_churn_status_since = 0 for", "= context.job.context['chat_data']['nodes'][node_address] was_healthy = node_data.setdefault('is_midgard_healthy', True) is_midgard_healthy = is_midgard_api_healthy(node_data['ip_address']) if", "'Midgard API is healthy again! 👌' + '\\n' + \\", "remote_node = get_thornode_object_or_none(address=node_address) except HTTPError as e: logger.exception(e) continue if", "is some blocks behind with catch up status \"\"\" chat_id", "the latest block height and catching up! 
💀 ' +", "def build_notification_message_for_active_node(local_node, remote_node, context) -> [str, None]: changed_fields = [", "if remote_node is None: text = 'THORNode ' + local_node['alias']", "version status.\") return highest_version = max(map(lambda n: n['version'], node_accounts), key=lambda", "[] churned_out = [] highest_churn_status_since = 0 for validator in", "💀 ' + '\\n' + \\ 'IP: ' + node_data['ip_address']", "None: text = 'THORNode ' + local_node['alias'] + ' is", "is_not_blocked: message = build_notification_message_for_active_node(local_node, remote_node, context) if message: # Update", "validators: context.bot_data['node_statuses'][ validator['node_address']] = validator['status'] return local_node_statuses = context.bot_data['node_statuses'] churned_in", "context.bot_data.setdefault(\"vault_addresses\", {}) current_chains = get_pool_addresses_from_any_node() for chain in current_chains: if", "\\ f\"New Vault address: {chain['address']}\\n\" else: text += \"\\n\\n⚠️ 🚨", "up! 
💀 ' + '\\n' + \\ 'IP: ' +", "e: logger.exception(e) continue if remote_node is None: text = 'THORNode", "remote_status != local_status: if 'active' == remote_status: churned_in.append({ \"address\": validator['node_address'],", "' + block_height try_message_with_home_menu(context=context, chat_id=chat_id, text=text) def check_thorchain_midgard_api(context, node_address): \"\"\"", "context.bot_data['vault_addresses'][chain['chain']] = chain['address'] except Exception as e: logger.exception(e) try_message_to_all_users(context, text=text)", "try_message_with_home_menu, try_message_to_all_users from packaging import version from service.utils import *", "*{node['alias']}* ‼️\\n\" \\ f\"Your software version is *{node['version']}* \" \\", "context.job.context['chat_data']['nodes'][node_address] try: block_height = get_latest_block_height(node_data['ip_address']) except (Timeout, ConnectionError): logger.warning(f\"Timeout or", "bool: chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] # If not", "message = 'THORChain is *missing funds*! 
💀\\n\\n' message += get_insolvent_balances_message(asgard_solvency,", "local_node_statuses, highest_churn_status_since): highest_churn_status_since = int(validator['status_since']) for validator in validators: remote_status", "node accounts while checking version status.\") return highest_version = max(map(lambda", "or Connection error with {node_data['ip_address']}\") return if node_data['is_catching_up'] != is_currently_catching_up:", "context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] # If not initialized assuming node", "the software on your node: *{node['alias']}* ‼️\\n\" \\ f\"Your software", "node_address=node_address) for node_address in inactive_nodes: del chat_data['nodes'][node_address] def build_notification_message_for_active_node(local_node, remote_node,", "f\"THORNode: {local_node['alias']}\\n\" \\ f\"Address: {local_node['node_address']}\\n\" \\ f\"Status: {local_node['status'].capitalize()}\" if 'status'", "try: node_accounts = get_node_accounts() except Exception as e: logger.exception(e) logger.error(\"I", "remote_node['status'] local_node['bond'] = remote_node['bond'] local_node['slash_points'] = remote_node['slash_points'] local_node['ip_address'] = remote_node['ip_address']", "text += f\"📡 Network Security: *{network_security_ratio_to_string(get_network_security_ratio(network))}*\\n\\n\" \\ f\"💚 Total Active", "+ \" %* APY\" context.bot_data.setdefault(\"vault_addresses\", {}) current_chains = get_pool_addresses_from_any_node() for", "+ \\ f\"IP: {node_data['ip_address']}\\n\" + \\ f\"THORNode: {node_data['alias']}\\n\" + \\", "'bond', 'slash_points'] if local_node[field] != remote_node[field] ] threshold = get_slash_points_threshold(context)", "logger.warning(f\"Timeout or Connection error while querying Asgard and Yggdrasil.\") return", "💀\\n\\n' message += get_insolvent_balances_message(asgard_solvency, yggdrasil_solvency) else: if insolvency_count >= MISSING_FUNDS_THRESHOLD:", 
"chat_id=chat_id, text=get_node_health_warning_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = False return False def check_thorchain_block_height(context, node_address):", "increasing again! 👌\\n\" + \\ f\"IP: {node_data['ip_address']}\\n\" + \\ f\"THORNode:", "nodes already runs on *{highest_version}*\" try_message_with_home_menu( context, chat_id=context.job.context['chat_id'], text=message) def", "remote_node, context) if message: # Update data local_node['status'] = remote_node['status']", "= datetime.timestamp(datetime.now()) local_node['notification_timeout_in_seconds'] *= NOTIFICATION_TIMEOUT_MULTIPLIER try_message_with_home_menu(context=context, chat_id=chat_id, text=message) else: local_node['notification_timeout_in_seconds']", "= 'Midgard API is healthy again! 👌' + '\\n' +", "= 0 for validator in validators: if did_churn_happen(validator, local_node_statuses, highest_churn_status_since):", "\\ f\"Status: {local_node['status'].capitalize()}\" if 'status' in changed_fields: text += f'", "changed_fields: text += f' ➡️ {remote_node[\"status\"].capitalize()}' text += f\"\\nBond: {tor_to_rune(int(local_node['bond']))}\"", "= context.bot_data.setdefault(\"insolvency_count\", 0) message = None if not is_solvent: insolvency_count", "== MISSING_FUNDS_THRESHOLD: message = 'THORChain is *missing funds*! 💀\\n\\n' message", "+ '\\n\\n' + \\ 'Please check your Thornode immediately!' 
else:", "in changed_fields: text += f\" ➡️ {tor_to_rune(int(remote_node['bond']))}\" text += '\\nSlash", "try: validators = get_node_accounts() except Exception as e: logger.exception(e) logger.error(\"I", "remote_status == 'active') or ( local_status == 'active' and remote_status", "(Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error while querying Asgard and", "context) -> [str, None]: changed_fields = [ field for field", "is_midgard_api_healthy(node_data['ip_address']) if was_healthy != is_midgard_healthy: if is_midgard_healthy: text = 'Midgard", "{}).items(): try: remote_node = get_thornode_object_or_none(address=node_address) except HTTPError as e: logger.exception(e)", "Bonding ROI: *\" + '{:.2f}'.format( float(network['bondingAPY']) * 100) + \"", "that Midgard API is ok \"\"\" chat_id = context.job.context['chat_id'] node_data", "try_message_with_home_menu( context, chat_id=context.job.context['chat_id'], text=message) def check_churning(context): try: validators = get_node_accounts()", "\\ f\"Block height now at: {block_height}\\n\" try_message_with_home_menu(context=context, chat_id=chat_id, text=text) block_height_stuck_count", "remote_node[field] ] threshold = get_slash_points_threshold(context) slash_point_change = abs(int(local_node['slash_points']) - int(remote_node['slash_points']))", "try_message_with_home_menu(context=context, chat_id=chat_id, text=text) continue is_not_blocked = float(local_node['last_notification_timestamp']) < \\ datetime.timestamp(", "node_address) -> bool: chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] #", "text = 'Block height is not increasing anymore! 💀' +", "➡️ {remote_node[\"status\"].capitalize()}' text += f\"\\nBond: {tor_to_rune(int(local_node['bond']))}\" if 'bond' in changed_fields:", "1: text = f\"Block height is increasing again! 👌\\n\" +", "node_data[\"healthy\"] try: # Check whether node answers. 
If it doesn't", "and is_thornode_healthy(context, node_address): check_thorchain_block_height(context, node_address=node_address) check_thorchain_catch_up_status(context, node_address=node_address) check_thorchain_midgard_api(context, node_address=node_address) for", "was_healthy = node_data.setdefault('is_midgard_healthy', True) is_midgard_healthy = is_midgard_api_healthy(node_data['ip_address']) if was_healthy !=", "height is not increasing anymore! 💀' + '\\n' + \\", "= context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] try: block_height = get_latest_block_height(node_data['ip_address']) except", "checking version status.\") return highest_version = max(map(lambda n: n['version'], node_accounts),", "with catch up status \"\"\" chat_id = context.job.context['chat_id'] node_data =", "chat_data.get('nodes', {}).values(): if version.parse(node['version']) < version.parse(highest_version): message = f\"Consider updating", "check_thorchain_catch_up_status(context, node_address=node_address) check_thorchain_midgard_api(context, node_address=node_address) for node_address in inactive_nodes: del chat_data['nodes'][node_address]", "# If not initialized assuming node was healhty. if \"healthy\"", "len(churned_out): text = \"🔄 CHURN SUMMARY\\n\" \\ \"THORChain has successfully", "🚨 CHURNING BUT THE VAULT ADDRESSES DID NOT CHANGE 🚨\\n\"", "None if not is_solvent: insolvency_count += 1 if insolvency_count ==", "return True return False def is_thornode_healthy(context, node_address) -> bool: chat_id", "node_address in inactive_nodes: del chat_data['nodes'][node_address] def build_notification_message_for_active_node(local_node, remote_node, context) ->", "f\"⬇️\\n\" \\ f\"New Vault address: {chain['address']}\\n\" else: text += \"\\n\\n⚠️", "(Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error with {node_data['ip_address']}\") return is_stuck", "'\\n\\n' + \\ 'Please check your Thornode immediately!' 
try_message_with_home_menu(context=context, chat_id=chat_id,", "local_status: churned_out.append({ \"address\": validator['node_address'], \"bond\": validator['bond'] }) if len(churned_in) or", "caught up to the latest block height again! 👌' +", "try: remote_node = get_thornode_object_or_none(address=node_address) except HTTPError as e: logger.exception(e) continue", "one of the nodes already runs on *{highest_version}*\" try_message_with_home_menu( context,", "= context.job.context['chat_data']['nodes'][node_address] # If not initialized assuming node was healhty.", "Ratio: *\" + '{:.2f}'.format( int(get_network_security_ratio(network) * 100)) + \" %*\\n\\n\"", "get_node_health_warning_message, get_node_healthy_again_message from handlers.chat_helpers import try_message_with_home_menu, try_message_to_all_users from packaging import", "except Exception as e: logger.exception(e) try_message_to_all_users(context, text=text) for validator in", "if churning occurred.\") return if 'node_statuses' not in context.bot_data: context.bot_data['node_statuses']", "except (Timeout, ConnectionError, BadStatusException, Exception): if was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_health_warning_message(node_data))", "= block_height_stuck_count def check_solvency_job(context): message = check_solvency(context) if message: try_message_to_all_users(context,", "= [] for node_address, local_node in chat_data.get('nodes', {}).items(): try: remote_node", "text = 'Midgard API is healthy again! 
👌' + '\\n'", "text=text) for validator in validators: context.bot_data['node_statuses'][ validator['node_address']] = validator['status'] def", "= validator['status'] local_status = local_node_statuses[ validator['node_address']] if validator[ 'node_address'] in", "+= f\"\\n*{chain['chain']}*: \\n\" \\ f\"Old Vault address: {context.bot_data['vault_addresses'][chain['chain']]}\\n\"\\ f\"⬇️\\n\" \\", "validator in validators: context.bot_data['node_statuses'][ validator['node_address']] = validator['status'] def did_churn_happen(validator, local_node_statuses,", "+ node_address + '\\n\\n' + \\ 'Please check your Thornode", "\"unknown\" if int(validator['status_since']) > highest_churn_status_since and \\ ((local_status == 'ready'", "'\\n' + \\ 'IP: ' + node_data['ip_address'] + '\\n' +", "block_height + '\\n\\n' + \\ 'Please check your Thornode immediately!'", "= abs(int(local_node['slash_points']) - int(remote_node['slash_points'])) if (len(changed_fields) <= 1) and ('slash_points'", "if 'active' == remote_status: churned_in.append({ \"address\": validator['node_address'], \"bond\": validator['bond'] })", "highest_version for node in chat_data.get('nodes', {}).values(): if version.parse(node['version']) < version.parse(highest_version):", "return False def is_thornode_healthy(context, node_address) -> bool: chat_id = context.job.context['chat_id']", "+ local_node['alias'] + ' is not active anymore! 
💀' +", "+ node_address + '\\n' + \\ 'Block height stuck at:", "node_data['ip_address']) except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error with {node_data['ip_address']}\")", "check_thorchain_block_height(context, node_address): chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] try: block_height", "f\"Status: {local_node['status'].capitalize()}\" if 'status' in changed_fields: text += f' ➡️", "in inactive_nodes: del chat_data['nodes'][node_address] def build_notification_message_for_active_node(local_node, remote_node, context) -> [str,", "in validators: if did_churn_happen(validator, local_node_statuses, highest_churn_status_since): highest_churn_status_since = int(validator['status_since']) for", "churned_out.append({ \"address\": validator['node_address'], \"bond\": validator['bond'] }) if len(churned_in) or len(churned_out):", "check_versions_status(context): chat_data = context.job.context['chat_data'] try: node_accounts = get_node_accounts() except Exception", "node_accounts), key=lambda v: version.parse(v)) last_newest_version = chat_data.get('newest_software_version', None) if last_newest_version", "churned:\\n\\n\" text += \"Nodes Added:\\n\" if len(churned_in) else \"\" for", "address: {node_address}\\n\" + \\ f\"Block height now at: {block_height}\\n\" try_message_with_home_menu(context=context,", "in chat_data.get('nodes', {}).items(): try: remote_node = get_thornode_object_or_none(address=node_address) except HTTPError as", "chat_id=chat_id, text=get_node_healthy_again_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True return True except (Timeout, ConnectionError,", "validator in validators: context.bot_data['node_statuses'][ validator['node_address']] = validator['status'] return local_node_statuses =", "if remote_status != local_status: if 'active' == remote_status: churned_in.append({ \"address\":", 
"local_node['ip_address'] = remote_node['ip_address'] local_node['last_notification_timestamp'] = datetime.timestamp(datetime.now()) local_node['notification_timeout_in_seconds'] *= NOTIFICATION_TIMEOUT_MULTIPLIER try_message_with_home_menu(context=context,", "Exception): if was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_health_warning_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = False return", "+ \\ 'Node address: ' + node_address try_message_with_home_menu(context, chat_id=chat_id, text=text)", "API is not healthy anymore! 💀' + '\\n' + \\", "else \"unknown\" if remote_status != local_status: if 'active' == remote_status:", "+ '\\n' + \\ 'IP: ' + node_data['ip_address'] + '\\n'", "+= 1 if insolvency_count == MISSING_FUNDS_THRESHOLD: message = 'THORChain is", "'node_statuses' not in context.bot_data: context.bot_data['node_statuses'] = {} for validator in", "= 0 context.bot_data[\"insolvency_count\"] = insolvency_count return message def check_thorchain_catch_up_status(context, node_address):", "unavailable\" if is_currently_catching_up: node_data['is_catching_up'] = True text = 'The Node", "local_node['notification_timeout_in_seconds'] = INITIAL_NOTIFICATION_TIMEOUT if local_node['status'].upper() in MONITORED_STATUSES and is_thornode_healthy(context, node_address):", "\"\"\" chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] was_healthy = node_data.setdefault('is_midgard_healthy',", "is not increasing anymore! 💀' + '\\n' + \\ 'IP:", "if validator[ 'node_address'] in local_node_statuses else \"unknown\" if remote_status !=", "validators: if did_churn_happen(validator, local_node_statuses, highest_churn_status_since): highest_churn_status_since = int(validator['status_since']) for validator", "' + '{:,}'.format(int(local_node['slash_points'])) if 'slash_points' in changed_fields: text += '", "anymore! 
💀' + '\\n' + \\ 'Address: ' + node_address", "None]: changed_fields = [ field for field in ['status', 'bond',", "\\ \"⚖️ Bonded/Staked Ratio: *\" + '{:.2f}'.format( int(get_network_security_ratio(network) * 100))", "churned_out = [] highest_churn_status_since = 0 for validator in validators:", "is increasing again! 👌\\n\" + \\ f\"IP: {node_data['ip_address']}\\n\" + \\", "👌\\n' insolvency_count = 0 context.bot_data[\"insolvency_count\"] = insolvency_count return message def", "local_node['notification_timeout_in_seconds'] *= NOTIFICATION_TIMEOUT_MULTIPLIER try_message_with_home_menu(context=context, chat_id=chat_id, text=message) else: local_node['notification_timeout_in_seconds'] = INITIAL_NOTIFICATION_TIMEOUT", "\"\"\" Check if node is some blocks behind with catch", "up status \"\"\" chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] if", "else: node_data['is_catching_up'] = False text = 'The node caught up", "the node accounts while checking version status.\") return highest_version =", "+ '{:.2f}'.format( int(get_network_security_ratio(network) * 100)) + \" %*\\n\\n\" \\ \"↩️", "'\\n\\n' + \\ 'Please check your Thornode immediately!' 
try_message_with_home_menu(context, chat_id=chat_id,", "version from service.utils import * def check_thornodes(context): chat_id = context.job.context['chat_id']", "now at: {block_height}\\n\" try_message_with_home_menu(context=context, chat_id=chat_id, text=text) block_height_stuck_count = 0 node_data['block_height']", "+= f' ➡️ {remote_node[\"status\"].capitalize()}' text += f\"\\nBond: {tor_to_rune(int(local_node['bond']))}\" if 'bond'", "f\"IP: {node_data['ip_address']}\\n\" + \\ f\"THORNode: {node_data['alias']}\\n\" + \\ f\"Node address:", "chat_id = context.job.context['chat_id'] chat_data = context.job.context['chat_data'] inactive_nodes = [] for", "if not is_solvent: insolvency_count += 1 if insolvency_count == MISSING_FUNDS_THRESHOLD:", "is ok \"\"\" chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] was_healthy", "local_node['slash_points'] = remote_node['slash_points'] local_node['ip_address'] = remote_node['ip_address'] local_node['last_notification_timestamp'] = datetime.timestamp(datetime.now()) local_node['notification_timeout_in_seconds']", "for chain in current_chains: if chain['chain'] in context.bot_data['vault_addresses']: if chain['address']", "version.parse(node['version']) < version.parse(highest_version): message = f\"Consider updating the software on", "else \"\" for node in churned_in: text += f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\"", "max(map(lambda n: n['version'], node_accounts), key=lambda v: version.parse(v)) last_newest_version = chat_data.get('newest_software_version',", "+= f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text += \"\\nNodes Removed:\\n\" if len(churned_out) else", "' + '\\n' + \\ 'IP: ' + node_data['ip_address'] +", "remote_node['slash_points'] local_node['ip_address'] = remote_node['ip_address'] local_node['last_notification_timestamp'] = datetime.timestamp(datetime.now()) 
local_node['notification_timeout_in_seconds'] *= NOTIFICATION_TIMEOUT_MULTIPLIER", "if \"Vault Addresses\" not in text else \"\" text +=", "if len(churned_out) else \"\" for node in churned_out: text +=", "is_thorchain_catching_up( node_data['ip_address']) except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error with", "and \\ ((local_status == 'ready' and remote_status == 'active') or", "is *100% solvent* again! 👌\\n' insolvency_count = 0 context.bot_data[\"insolvency_count\"] =", "Liquidity ROI: *\" + '{:.2f}'.format( float(network['liquidityAPY']) * 100) + \"", "import * def check_thornodes(context): chat_id = context.job.context['chat_id'] chat_data = context.job.context['chat_data']", "= get_latest_block_height(node_data['ip_address']) except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error with", "highest_churn_status_since and \\ ((local_status == 'ready' and remote_status == 'active')", "def check_thorchain_midgard_api(context, node_address): \"\"\" Check that Midgard API is ok", "try: is_currently_catching_up = is_thorchain_catching_up( node_data['ip_address']) except (Timeout, ConnectionError): logger.warning(f\"Timeout or", "if did_churn_happen(validator, local_node_statuses, highest_churn_status_since): highest_churn_status_since = int(validator['status_since']) for validator in", "< version.parse(highest_version): message = f\"Consider updating the software on your", "{}) current_chains = get_pool_addresses_from_any_node() for chain in current_chains: if chain['chain']", "= False try: is_currently_catching_up = is_thorchain_catching_up( node_data['ip_address']) except (Timeout, ConnectionError):", "or Connection error with {node_data['ip_address']}\") block_height = \"currently unavailable\" if", "again! 
👌\\n' insolvency_count = 0 context.bot_data[\"insolvency_count\"] = insolvency_count return message", "f' ➡️ {remote_node[\"status\"].capitalize()}' text += f\"\\nBond: {tor_to_rune(int(local_node['bond']))}\" if 'bond' in", "if chain['chain'] in context.bot_data['vault_addresses']: if chain['address'] != context.bot_data['vault_addresses'][chain['chain']]: text +=", "-> bool: remote_status = validator['status'] local_status = local_node_statuses[validator['node_address']] if validator[", "context.bot_data['node_statuses'] churned_in = [] churned_out = [] highest_churn_status_since = 0", "None except Exception as e: logger.exception(e) return None is_solvent =", "*\" + '{:.2f}'.format( float(network['liquidityAPY']) * 100) + \" %* APY\"", "if not was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_healthy_again_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True return", "def is_thornode_healthy(context, node_address) -> bool: chat_id = context.job.context['chat_id'] node_data =", "'bond' in changed_fields: text += f\" ➡️ {tor_to_rune(int(remote_node['bond']))}\" text +=", "remote_node is None: text = 'THORNode ' + local_node['alias'] +", "remote_node['ip_address'] local_node['last_notification_timestamp'] = datetime.timestamp(datetime.now()) local_node['notification_timeout_in_seconds'] *= NOTIFICATION_TIMEOUT_MULTIPLIER try_message_with_home_menu(context=context, chat_id=chat_id, text=message)", "local_status: if 'active' == remote_status: churned_in.append({ \"address\": validator['node_address'], \"bond\": validator['bond']", "False text = 'The node caught up to the latest", "successfully churned:\\n\\n\" text += \"Nodes Added:\\n\" if len(churned_in) else \"\"", "None if len(changed_fields) > 0: text = f\"THORNode: {local_node['alias']}\\n\" \\", "chat_data['newest_software_version'] = highest_version for node in chat_data.get('nodes', {}).values(): if 
version.parse(node['version'])", "not in context.bot_data: context.bot_data['node_statuses'] = {} for validator in validators:", "node_address + '\\n' + \\ 'Block height stuck at: '", "address: ' + node_address + '\\n\\n' + \\ 'Please check", "MISSING_FUNDS_THRESHOLD: message = 'THORChain is *100% solvent* again! 👌\\n' insolvency_count", "height and catching up! 💀 ' + '\\n' + \\", "context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] if 'is_catching_up' not in node_data: node_data['is_catching_up']", "get an Exception. get_latest_block_height(node_data['ip_address']) if not was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_healthy_again_message(node_data))", "churned_in: text += f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text += \"\\nNodes Removed:\\n\" if", "+= '\\nSlash Points: ' + '{:,}'.format(int(local_node['slash_points'])) if 'slash_points' in changed_fields:", "= f\"Block height is increasing again! 
👌\\n\" + \\ f\"IP:", "int(validator['status_since']) > highest_churn_status_since and \\ ((local_status == 'ready' and remote_status", "Security: *{network_security_ratio_to_string(get_network_security_ratio(network))}*\\n\\n\" \\ f\"💚 Total Active Bond: *{tor_to_rune(network['bondMetrics']['totalActiveBond'])}* (total)\\n\\n\" \\", "{node_data['ip_address']}\\n\" + \\ f\"THORNode: {node_data['alias']}\\n\" + \\ f\"Node address: {node_address}\\n\"", "= context.job.context['chat_data'] try: node_accounts = get_node_accounts() except Exception as e:", "if insolvency_count == MISSING_FUNDS_THRESHOLD: message = 'THORChain is *missing funds*!", "with {node_data['ip_address']}\") return if node_data['is_catching_up'] != is_currently_catching_up: try: block_height =", "}) if len(churned_in) or len(churned_out): text = \"🔄 CHURN SUMMARY\\n\"", "local_node['status'] = remote_node['status'] local_node['bond'] = remote_node['bond'] local_node['slash_points'] = remote_node['slash_points'] local_node['ip_address']", "f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text += \"\\nSystem:\\n\" try: network = get_network_data() text", "+ \\ 'Address: ' + node_address + '\\n\\n' + \\", "behind with catch up status \"\"\" chat_id = context.job.context['chat_id'] node_data", "= INITIAL_NOTIFICATION_TIMEOUT if local_node['status'].upper() in MONITORED_STATUSES and is_thornode_healthy(context, node_address): check_thorchain_block_height(context,", "SUMMARY\\n\" \\ \"THORChain has successfully churned:\\n\\n\" text += \"Nodes Added:\\n\"", "insolvency_count return message def check_thorchain_catch_up_status(context, node_address): \"\"\" Check if node", "-> [str, None]: changed_fields = [ field for field in", "n['version'], node_accounts), key=lambda v: version.parse(v)) last_newest_version = chat_data.get('newest_software_version', None) if", "node_data['is_catching_up'] = True text = 'The Node is behind the", 
"context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True was_healthy = node_data[\"healthy\"] try: # Check whether", "local_node_statuses[ validator['node_address']] if validator[ 'node_address'] in local_node_statuses else \"unknown\" if", "logger.exception(e) continue if remote_node is None: text = 'THORNode '", "Active Bond: *{tor_to_rune(network['bondMetrics']['totalActiveBond'])}* (total)\\n\\n\" \\ \"⚖️ Bonded/Staked Ratio: *\" +", "\"bond\": validator['bond'] }) if len(churned_in) or len(churned_out): text = \"🔄", "as e: logger.exception(e) try_message_to_all_users(context, text=text) for validator in validators: context.bot_data['node_statuses'][", "MISSING_FUNDS_THRESHOLD: message = 'THORChain is *missing funds*! 💀\\n\\n' message +=", "+ '\\n' + \\ 'Address: ' + node_address + '\\n\\n'", "text += \"\\n\\n⚠️ 🚨 CHURNING BUT THE VAULT ADDRESSES DID", "+ \\ 'IP: ' + node_data['ip_address'] + '\\n' + \\", "at: ' + block_height + '\\n\\n' + \\ 'Please check", "healhty. if \"healthy\" not in context.job.context['chat_data']['nodes'][node_address]: context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True was_healthy", "some blocks behind with catch up status \"\"\" chat_id =", "int(remote_node['slash_points'])) if (len(changed_fields) <= 1) and ('slash_points' in changed_fields) and", "on your node: *{node['alias']}* ‼️\\n\" \\ f\"Your software version is", "occurred.\") return if 'node_statuses' not in context.bot_data: context.bot_data['node_statuses'] = {}", "error with {node_data['ip_address']}\") block_height = \"currently unavailable\" if is_currently_catching_up: node_data['is_catching_up']", "'\\n\\n' + \\ 'Please check your Thornode immediately!' 
else: node_data['is_catching_up']", "try_message_with_home_menu(context=context, chat_id=chat_id, text=message) else: local_node['notification_timeout_in_seconds'] = INITIAL_NOTIFICATION_TIMEOUT if local_node['status'].upper() in", "f\"Node address: {node_address}\\n\" + \\ f\"Block height now at: {block_height}\\n\"", "was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_healthy_again_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True return True except", "insolvency_count += 1 if insolvency_count == MISSING_FUNDS_THRESHOLD: message = 'THORChain", "your Thornode immediately!' try_message_with_home_menu(context=context, chat_id=chat_id, text=text) else: if block_height_stuck_count >=", "chat_id=chat_id, text=text) def check_thorchain_midgard_api(context, node_address): \"\"\" Check that Midgard API", "}) elif 'active' == local_status: churned_out.append({ \"address\": validator['node_address'], \"bond\": validator['bond']", "*\" + '{:.2f}'.format( float(network['bondingAPY']) * 100) + \" %* APY\\n\\n\"", "validator['status'] local_status = local_node_statuses[validator['node_address']] if validator[ 'node_address'] in local_node_statuses else", "+ \\ 'Node address: ' + node_address + '\\n' +", "immediately!' else: node_data['is_catching_up'] = False text = 'The node caught", "'\\n\\n' + \\ 'Please enter another THORNode address.' inactive_nodes.append(node_address) try_message_with_home_menu(context=context,", "' + node_data['ip_address'] + '\\n' + \\ 'THORNode: ' +", "= 'Midgard API is not healthy anymore! 
💀' + '\\n'", "\"THORChain has successfully churned:\\n\\n\" text += \"Nodes Added:\\n\" if len(churned_in)", "chain in current_chains: if chain['chain'] in context.bot_data['vault_addresses']: if chain['address'] !=", "accounts while checking version status.\") return highest_version = max(map(lambda n:", "'node_address'] in local_node_statuses else \"unknown\" if int(validator['status_since']) > highest_churn_status_since and", "an Exception. get_latest_block_height(node_data['ip_address']) if not was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_healthy_again_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"]", "+ \\ 'Current block height: ' + block_height + '\\n\\n'", "'\\n' + \\ 'Node address: ' + node_address + '\\n'", "= build_notification_message_for_active_node(local_node, remote_node, context) if message: # Update data local_node['status']", "message += get_insolvent_balances_message(asgard_solvency, yggdrasil_solvency) else: if insolvency_count >= MISSING_FUNDS_THRESHOLD: message", "> 0: text = f\"THORNode: {local_node['alias']}\\n\" \\ f\"Address: {local_node['node_address']}\\n\" \\", "= block_height node_data[\"block_height_stuck_count\"] = block_height_stuck_count def check_solvency_job(context): message = check_solvency(context)", "message = 'THORChain is *100% solvent* again! 
👌\\n' insolvency_count =", "True return True except (Timeout, ConnectionError, BadStatusException, Exception): if was_healthy:", "f\"Address: {local_node['node_address']}\\n\" \\ f\"Status: {local_node['status'].capitalize()}\" if 'status' in changed_fields: text", "validator['node_address']] if validator[ 'node_address'] in local_node_statuses else \"unknown\" if remote_status", "+= f\"\\n\\n🔐 Vault Addresses:\" if \"Vault Addresses\" not in text", "if block_height_stuck_count == 1: text = 'Block height is not", "in node_data: node_data['is_catching_up'] = False try: is_currently_catching_up = is_thorchain_catching_up( node_data['ip_address'])", "ConnectionError): logger.warning(f\"Timeout or Connection error with {node_data['ip_address']}\") block_height = \"currently", "address: ' + node_address + '\\n' + \\ 'Current block", "in local_node_statuses else \"unknown\" if remote_status != local_status: if 'active'", "1 if block_height_stuck_count == 1: text = 'Block height is", "context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address] was_healthy = node_data.setdefault('is_midgard_healthy', True) is_midgard_healthy =", "for node in churned_out: text += f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text +=", "address: ' + node_address + '\\n' + \\ 'Block height", "else: return None def check_versions_status(context): chat_data = context.job.context['chat_data'] try: node_accounts", "context.job.context['chat_data']['nodes'][node_address]: context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True was_healthy = node_data[\"healthy\"] try: # Check", "chat_id=chat_id, text=text) block_height_stuck_count = 0 node_data['block_height'] = block_height node_data[\"block_height_stuck_count\"] =", "if local_node[field] != remote_node[field] ] threshold = get_slash_points_threshold(context) slash_point_change =", "get_latest_block_height(node_data['ip_address']) if not was_healthy: 
try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_healthy_again_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True", "return None def check_versions_status(context): chat_data = context.job.context['chat_data'] try: node_accounts =", "remote_status == 'standby')): return True return False def is_thornode_healthy(context, node_address)", "Network Security: *{network_security_ratio_to_string(get_network_security_ratio(network))}*\\n\\n\" \\ f\"💚 Total Active Bond: *{tor_to_rune(network['bondMetrics']['totalActiveBond'])}* (total)\\n\\n\"", "ConnectionError): logger.warning(f\"Timeout or Connection error with {node_data['ip_address']}\") return is_stuck =", "not increasing anymore! 💀' + '\\n' + \\ 'IP: '", "with {node_data['ip_address']}\") return is_stuck = block_height <= node_data.setdefault('block_height', 0) block_height_stuck_count", "= check_solvency(context) if message: try_message_to_all_users(context, text=message) def check_solvency(context) -> [str,", "validator['status'] def did_churn_happen(validator, local_node_statuses, highest_churn_status_since) -> bool: remote_status = validator['status']", "node_data['is_catching_up'] = False try: is_currently_catching_up = is_thorchain_catching_up( node_data['ip_address']) except (Timeout,", "block_height_stuck_count >= 1: text = f\"Block height is increasing again!", "'\\n' + \\ 'Address: ' + node_address + '\\n\\n' +", "Addresses\" not in text else \"\" text += f\"\\n*{chain['chain']}*: \\n\"", "\\ 'Node address: ' + node_address try_message_with_home_menu(context, chat_id=chat_id, text=text) else:", "\\ f\"THORNode: {node_data['alias']}\\n\" + \\ f\"Node address: {node_address}\\n\" + \\", "' + node_data['alias'] + '\\n' + \\ 'Node address: '", "+ block_height try_message_with_home_menu(context=context, chat_id=chat_id, text=text) def check_thorchain_midgard_api(context, node_address): \"\"\" Check", "'status' in changed_fields: text += f' ➡️ 
{remote_node[\"status\"].capitalize()}' text +=", "*{highest_version}*\" try_message_with_home_menu( context, chat_id=context.job.context['chat_id'], text=message) def check_churning(context): try: validators =", "try: asgard_solvency = asgard_solvency_check() yggdrasil_solvency = yggdrasil_solvency_check() except (Timeout, ConnectionError):", "'\\n' + \\ 'THORNode: ' + node_data['alias'] + '\\n' +", "= False text = 'The node caught up to the", "remote_node, context) -> [str, None]: changed_fields = [ field for", "[ field for field in ['status', 'bond', 'slash_points'] if local_node[field]", "CHURN SUMMARY\\n\" \\ \"THORChain has successfully churned:\\n\\n\" text += \"Nodes", "- timedelta(seconds=local_node['notification_timeout_in_seconds'])) if is_not_blocked: message = build_notification_message_for_active_node(local_node, remote_node, context) if", "context, chat_id=context.job.context['chat_id'], text=message) def check_churning(context): try: validators = get_node_accounts() except", "return is_stuck = block_height <= node_data.setdefault('block_height', 0) block_height_stuck_count = node_data.setdefault(\"block_height_stuck_count\",", "int(validator['status_since']) for validator in validators: remote_status = validator['status'] local_status =", "chat_data = context.job.context['chat_data'] try: node_accounts = get_node_accounts() except Exception as", "if \"healthy\" not in context.job.context['chat_data']['nodes'][node_address]: context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True was_healthy =", "0) block_height_stuck_count = node_data.setdefault(\"block_height_stuck_count\", 0) if is_stuck: block_height_stuck_count += 1", "\\ f\"Node address: {node_address}\\n\" + \\ f\"Block height now at:", "'THORChain is *missing funds*! 
💀\\n\\n' message += get_insolvent_balances_message(asgard_solvency, yggdrasil_solvency) else:", "'\\nSlash Points: ' + '{:,}'.format(int(local_node['slash_points'])) if 'slash_points' in changed_fields: text", "f\"\\n*{chain['chain']}*: \\n\" \\ f\"Old Vault address: {context.bot_data['vault_addresses'][chain['chain']]}\\n\"\\ f\"⬇️\\n\" \\ f\"New", "+ '\\n' + \\ 'Node address: ' + node_address try_message_with_home_menu(context,", "get_thornode_object_or_none(address=node_address) except HTTPError as e: logger.exception(e) continue if remote_node is", "<= 1) and ('slash_points' in changed_fields) and (slash_point_change <= threshold):", "len(churned_out) else \"\" for node in churned_out: text += f\"*{node['address']}*\\nBond:", "is_thornode_healthy(context, node_address) -> bool: chat_id = context.job.context['chat_id'] node_data = context.job.context['chat_data']['nodes'][node_address]", "block_height_stuck_count += 1 if block_height_stuck_count == 1: text = 'Block", "= int(validator['status_since']) for validator in validators: remote_status = validator['status'] local_status", "text = 'The node caught up to the latest block", "+ ' is not active anymore! 💀' + '\\n' +", "== local_status: churned_out.append({ \"address\": validator['node_address'], \"bond\": validator['bond'] }) if len(churned_in)", "not in context.job.context['chat_data']['nodes'][node_address]: context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True was_healthy = node_data[\"healthy\"] try:", "'IP: ' + node_data['ip_address'] + '\\n' + \\ 'THORNode: '", "\\ 'THORNode: ' + node_data['alias'] + '\\n' + \\ 'Node", "stuck at: ' + block_height + '\\n\\n' + \\ 'Please", "+ '\\n\\n' + \\ 'Please check your Thornode immediately!' 
try_message_with_home_menu(context,", "🚨\\n\" context.bot_data['vault_addresses'][chain['chain']] = chain['address'] except Exception as e: logger.exception(e) try_message_to_all_users(context,", "+ node_address + '\\n\\n' + \\ 'Please enter another THORNode", "if local_node['status'].upper() in MONITORED_STATUSES and is_thornode_healthy(context, node_address): check_thorchain_block_height(context, node_address=node_address) check_thorchain_catch_up_status(context,", "if is_midgard_healthy: text = 'Midgard API is healthy again! 👌'", "is_solvent = asgard_solvency['is_solvent'] and yggdrasil_solvency['is_solvent'] insolvency_count = context.bot_data.setdefault(\"insolvency_count\", 0) message", "Total Active Bond: *{tor_to_rune(network['bondMetrics']['totalActiveBond'])}* (total)\\n\\n\" \\ \"⚖️ Bonded/Staked Ratio: *\"", "+ '{:,}'.format(int(remote_node['slash_points'])) return text else: return None def check_versions_status(context): chat_data", "+ \\ 'Please enter another THORNode address.' inactive_nodes.append(node_address) try_message_with_home_menu(context=context, chat_id=chat_id,", "text += ' ➡️ ' + '{:,}'.format(int(remote_node['slash_points'])) return text else:", "# Update data local_node['status'] = remote_node['status'] local_node['bond'] = remote_node['bond'] local_node['slash_points']", "HTTPError as e: logger.exception(e) continue if remote_node is None: text", "node_accounts = get_node_accounts() except Exception as e: logger.exception(e) logger.error(\"I couldn't", "-> [str, None]: try: asgard_solvency = asgard_solvency_check() yggdrasil_solvency = yggdrasil_solvency_check()", "not healthy anymore! 
💀' + '\\n' + \\ 'IP: '", "= node_data.setdefault(\"block_height_stuck_count\", 0) if is_stuck: block_height_stuck_count += 1 if block_height_stuck_count", "text=text) else: if block_height_stuck_count >= 1: text = f\"Block height", "in ['status', 'bond', 'slash_points'] if local_node[field] != remote_node[field] ] threshold", "%* APY\\n\\n\" \\ \"↩️ Liquidity ROI: *\" + '{:.2f}'.format( float(network['liquidityAPY'])", "text=text) def check_thorchain_midgard_api(context, node_address): \"\"\" Check that Midgard API is", "latest block height and catching up! 💀 ' + '\\n'", "validator['node_address'], \"bond\": validator['bond'] }) if len(churned_in) or len(churned_out): text =", "* 100) + \" %* APY\" context.bot_data.setdefault(\"vault_addresses\", {}) current_chains =", "node_data['is_catching_up'] != is_currently_catching_up: try: block_height = get_latest_block_height(node_data['ip_address']) except (Timeout, ConnectionError):", "text += f' ➡️ {remote_node[\"status\"].capitalize()}' text += f\"\\nBond: {tor_to_rune(int(local_node['bond']))}\" if", "ROI: *\" + '{:.2f}'.format( float(network['liquidityAPY']) * 100) + \" %*", "logger.warning(f\"Timeout or Connection error with {node_data['ip_address']}\") return if node_data['is_catching_up'] !=", "node_address try_message_with_home_menu(context, chat_id=chat_id, text=text) else: text = 'Midgard API is", "not is_solvent: insolvency_count += 1 if insolvency_count == MISSING_FUNDS_THRESHOLD: message", "= True was_healthy = node_data[\"healthy\"] try: # Check whether node", "+ '\\n\\n' + \\ 'Please enter another THORNode address.' 
inactive_nodes.append(node_address)", "((local_status == 'ready' and remote_status == 'active') or ( local_status", "!= is_currently_catching_up: try: block_height = get_latest_block_height(node_data['ip_address']) except (Timeout, ConnectionError): logger.warning(f\"Timeout", "<= node_data.setdefault('block_height', 0) block_height_stuck_count = node_data.setdefault(\"block_height_stuck_count\", 0) if is_stuck: block_height_stuck_count", "is_solvent: insolvency_count += 1 if insolvency_count == MISSING_FUNDS_THRESHOLD: message =", "' + node_address + '\\n' + \\ 'Current block height:", "height stuck at: ' + block_height + '\\n\\n' + \\", "text=message) def check_solvency(context) -> [str, None]: try: asgard_solvency = asgard_solvency_check()", "validators: context.bot_data['node_statuses'][ validator['node_address']] = validator['status'] def did_churn_happen(validator, local_node_statuses, highest_churn_status_since) ->", "= highest_version for node in chat_data.get('nodes', {}).values(): if version.parse(node['version']) <", "+ \" %*\\n\\n\" \\ \"↩️ Bonding ROI: *\" + '{:.2f}'.format(", "\"address\": validator['node_address'], \"bond\": validator['bond'] }) if len(churned_in) or len(churned_out): text", "and remote_status == 'active') or ( local_status == 'active' and", "n: n['version'], node_accounts), key=lambda v: version.parse(v)) last_newest_version = chat_data.get('newest_software_version', None)", "> version.parse(last_newest_version): chat_data['newest_software_version'] = highest_version for node in chat_data.get('nodes', {}).values():", "'{:.2f}'.format( float(network['bondingAPY']) * 100) + \" %* APY\\n\\n\" \\ \"↩️", "== 'ready' and remote_status == 'active') or ( local_status ==", "block_height_stuck_count = node_data.setdefault(\"block_height_stuck_count\", 0) if is_stuck: block_height_stuck_count += 1 if", "'node_address'] in local_node_statuses else \"unknown\" if remote_status != local_status: if", "= [] highest_churn_status_since = 0 for 
validator in validators: if", "ADDRESSES DID NOT CHANGE 🚨\\n\" context.bot_data['vault_addresses'][chain['chain']] = chain['address'] except Exception", "def check_thorchain_catch_up_status(context, node_address): \"\"\" Check if node is some blocks", "is healthy again! 👌' + '\\n' + \\ 'IP: '", "chat_id=context.job.context['chat_id'], text=message) def check_churning(context): try: validators = get_node_accounts() except Exception", "float(local_node['last_notification_timestamp']) < \\ datetime.timestamp( datetime.now() - timedelta(seconds=local_node['notification_timeout_in_seconds'])) if is_not_blocked: message", "check_solvency_job(context): message = check_solvency(context) if message: try_message_to_all_users(context, text=message) def check_solvency(context)", "(Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error with {node_data['ip_address']}\") block_height =", "chat_id=chat_id, text=text) else: if block_height_stuck_count >= 1: text = f\"Block", "datetime.now() - timedelta(seconds=local_node['notification_timeout_in_seconds'])) if is_not_blocked: message = build_notification_message_for_active_node(local_node, remote_node, context)", "yggdrasil_solvency = yggdrasil_solvency_check() except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error", "%* APY\" context.bot_data.setdefault(\"vault_addresses\", {}) current_chains = get_pool_addresses_from_any_node() for chain in", "try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_healthy_again_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = True return True except (Timeout,", "validator['node_address']] = validator['status'] def did_churn_happen(validator, local_node_statuses, highest_churn_status_since) -> bool: remote_status", "{node_data['ip_address']}\") return is_stuck = block_height <= node_data.setdefault('block_height', 0) block_height_stuck_count =", "yggdrasil_solvency['is_solvent'] 
insolvency_count = context.bot_data.setdefault(\"insolvency_count\", 0) message = None if not", "*{tor_to_rune(network['bondMetrics']['totalActiveBond'])}* (total)\\n\\n\" \\ \"⚖️ Bonded/Staked Ratio: *\" + '{:.2f}'.format( int(get_network_security_ratio(network)", "highest_churn_status_since = 0 for validator in validators: if did_churn_happen(validator, local_node_statuses,", "while querying Asgard and Yggdrasil.\") return None except Exception as", "+ node_data['alias'] + '\\n' + \\ 'Node address: ' +", "\"\" for node in churned_out: text += f\"*{node['address']}*\\nBond: *{tor_to_rune(node['bond'])}*\\n\" text", "len(churned_in) or len(churned_out): text = \"🔄 CHURN SUMMARY\\n\" \\ \"THORChain", "local_status = local_node_statuses[validator['node_address']] if validator[ 'node_address'] in local_node_statuses else \"unknown\"", "message = build_notification_message_for_active_node(local_node, remote_node, context) if message: # Update data", "\\ ((local_status == 'ready' and remote_status == 'active') or (", "\\ 'Current block height: ' + block_height try_message_with_home_menu(context=context, chat_id=chat_id, text=text)", "== 'standby')): return True return False def is_thornode_healthy(context, node_address) ->", "message def check_thorchain_catch_up_status(context, node_address): \"\"\" Check if node is some", "if 'slash_points' in changed_fields: text += ' ➡️ ' +", "it doesn't we get an Exception. get_latest_block_height(node_data['ip_address']) if not was_healthy:", "node is some blocks behind with catch up status \"\"\"", "+= \"\\nNodes Removed:\\n\" if len(churned_out) else \"\" for node in", "is_midgard_healthy: text = 'Midgard API is healthy again! 👌' +", "abs(int(local_node['slash_points']) - int(remote_node['slash_points'])) if (len(changed_fields) <= 1) and ('slash_points' in", "If not initialized assuming node was healhty. 
if \"healthy\" not", "+= ' ➡️ ' + '{:,}'.format(int(remote_node['slash_points'])) return text else: return", "1 if insolvency_count == MISSING_FUNDS_THRESHOLD: message = 'THORChain is *missing", "if message: try_message_to_all_users(context, text=message) def check_solvency(context) -> [str, None]: try:", "text=message) def check_churning(context): try: validators = get_node_accounts() except Exception as", "== 'active') or ( local_status == 'active' and remote_status ==", "[str, None]: try: asgard_solvency = asgard_solvency_check() yggdrasil_solvency = yggdrasil_solvency_check() except", "+= get_insolvent_balances_message(asgard_solvency, yggdrasil_solvency) else: if insolvency_count >= MISSING_FUNDS_THRESHOLD: message =", "enter another THORNode address.' inactive_nodes.append(node_address) try_message_with_home_menu(context=context, chat_id=chat_id, text=text) continue is_not_blocked", "message = None if not is_solvent: insolvency_count += 1 if", "node_address + '\\n\\n' + \\ 'Please enter another THORNode address.'", "+ \\ 'Block height stuck at: ' + block_height +", "VAULT ADDRESSES DID NOT CHANGE 🚨\\n\" context.bot_data['vault_addresses'][chain['chain']] = chain['address'] except", "for validator in validators: context.bot_data['node_statuses'][ validator['node_address']] = validator['status'] return local_node_statuses", "logger.warning(f\"Timeout or Connection error with {node_data['ip_address']}\") return is_stuck = block_height", "+ \\ f\"Node address: {node_address}\\n\" + \\ f\"Block height now", "or Connection error while querying Asgard and Yggdrasil.\") return None", "constants.messages import get_node_health_warning_message, get_node_healthy_again_message from handlers.chat_helpers import try_message_with_home_menu, try_message_to_all_users from", "node_data: node_data['is_catching_up'] = False try: is_currently_catching_up = is_thorchain_catching_up( node_data['ip_address']) except", "\\ 'Please check your Thornode immediately!' 
else: node_data['is_catching_up'] = False", "[] highest_churn_status_since = 0 for validator in validators: if did_churn_happen(validator,", "datetime.timestamp(datetime.now()) local_node['notification_timeout_in_seconds'] *= NOTIFICATION_TIMEOUT_MULTIPLIER try_message_with_home_menu(context=context, chat_id=chat_id, text=message) else: local_node['notification_timeout_in_seconds'] =", "build_notification_message_for_active_node(local_node, remote_node, context) if message: # Update data local_node['status'] =", "+ '{:,}'.format(int(local_node['slash_points'])) if 'slash_points' in changed_fields: text += ' ➡️", "height is increasing again! 👌\\n\" + \\ f\"IP: {node_data['ip_address']}\\n\" +", "version.parse(v)) last_newest_version = chat_data.get('newest_software_version', None) if last_newest_version is None or", "version.parse(last_newest_version): chat_data['newest_software_version'] = highest_version for node in chat_data.get('nodes', {}).values(): if", "validator['status'] local_status = local_node_statuses[ validator['node_address']] if validator[ 'node_address'] in local_node_statuses", "text += \"\\nSystem:\\n\" try: network = get_network_data() text += f\"📡", "\"\\nSystem:\\n\" try: network = get_network_data() text += f\"📡 Network Security:", "BadStatusException, Exception): if was_healthy: try_message_with_home_menu(context=context, chat_id=chat_id, text=get_node_health_warning_message(node_data)) context.job.context['chat_data']['nodes'][node_address][\"healthy\"] = False", "= 'Block height is not increasing anymore! 💀' + '\\n'", ">= 1: text = f\"Block height is increasing again! 👌\\n\"", "Asgard and Yggdrasil.\") return None except Exception as e: logger.exception(e)", "'Please check your Thornode immediately!' 
else: node_data['is_catching_up'] = False text", "context.bot_data['node_statuses'] = {} for validator in validators: context.bot_data['node_statuses'][ validator['node_address']] =", "logger.exception(e) try_message_to_all_users(context, text=text) for validator in validators: context.bot_data['node_statuses'][ validator['node_address']] =", "= yggdrasil_solvency_check() except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection error while", "if len(churned_in) else \"\" for node in churned_in: text +=", "is_midgard_healthy = is_midgard_api_healthy(node_data['ip_address']) if was_healthy != is_midgard_healthy: if is_midgard_healthy: text", "> highest_churn_status_since and \\ ((local_status == 'ready' and remote_status ==", "did_churn_happen(validator, local_node_statuses, highest_churn_status_since) -> bool: remote_status = validator['status'] local_status =", "+ \\ 'Please check your Thornode immediately!' else: node_data['is_catching_up'] =", "f\"📡 Network Security: *{network_security_ratio_to_string(get_network_security_ratio(network))}*\\n\\n\" \\ f\"💚 Total Active Bond: *{tor_to_rune(network['bondMetrics']['totalActiveBond'])}*", "check_thorchain_midgard_api(context, node_address=node_address) for node_address in inactive_nodes: del chat_data['nodes'][node_address] def build_notification_message_for_active_node(local_node,", "changed_fields: text += f\" ➡️ {tor_to_rune(int(remote_node['bond']))}\" text += '\\nSlash Points:", "node_data[\"block_height_stuck_count\"] = block_height_stuck_count def check_solvency_job(context): message = check_solvency(context) if message:", "catching up! 
💀 ' + '\\n' + \\ 'IP: '", "remote_status = validator['status'] local_status = local_node_statuses[ validator['node_address']] if validator[ 'node_address']", "is_currently_catching_up = is_thorchain_catching_up( node_data['ip_address']) except (Timeout, ConnectionError): logger.warning(f\"Timeout or Connection", "chain['chain'] in context.bot_data['vault_addresses']: if chain['address'] != context.bot_data['vault_addresses'][chain['chain']]: text += f\"\\n\\n🔐" ]
[ "ctr = (y[i/rw_dt]+1)/2.0 print \"CTR: \" + str(ctr) success =", "p = jacobi_to_poly(pdf[i,:]) result[i,:] = p(x) result[i,:] /= result[i,:].sum() return", "zeros(shape=(t.shape[0], prior.shape[0]), dtype=float) result[0,:] = prior for i in range(1,t.shape[0]):", "= zeros(shape=(pdf.shape[0], x.shape[0]), dtype=float) for i in range(0, pdf.shape[0]): p", "= poly_to_jacobi(prior) dt = 0.1 times = arange(0,tmax,dt) x =", "solve(matrix, xc) def jacobi_to_poly(x): result = poly1d([0]) for i in", "result + jacobi(i,a,b)*x[i] return result def propagate_jacobi(pc, t): \"\"\"Takes jacobi", "sqrt(dt) def random_walk(y0, tmax, dt, times = None): dt =", "= int(1.0/dt) prior = prior_in_jacobi rnd = uniform(0,1) if (i", "random_walk(0.35*2-1, tmax, rw_dt) solution_as_x = zeros(shape=(times.size, x.size), dtype=float) solution_as_jacobi =", "= 0.1 times = arange(0,tmax,dt) x = arange(-1,1,0.01) rw_dt =", "solution_as_jacobi = pde_solve(prior, times[i*nt:(i+1)*nt]) solution_as_x[i*nt:(i+1)*nt] = transform_to_x(solution_as_jacobi, x) plot(arange(0,4), empirical_ctr,", "nt = int(1.0/dt) prior = prior_in_jacobi rnd = uniform(0,1) if", "= 0.0 b = 3.0 theta=1.0 sigma=sqrt(theta/(2*(a+b+2))) tscale = 0.05", "success = (r < ctr).sum() print \"Empirical: \" + str(success", "range(0,4): nt = int(1.0/dt) prior = prior_in_jacobi rnd = uniform(0,1)", "dt, times = None): dt = dt * tscale def", "def propagate_jacobi(pc, t): \"\"\"Takes jacobi coefficients and propagates them\"\"\" n", "/ abs(y[i]) return (times, y) def beta_prior(s, f): return poly1d(ones(shape=(s,)),", "for i in range(1,y.shape[0]): y[i] = y[i-1] + rhs(y[i-1], times[i])*dt", "y) def beta_prior(s, f): return poly1d(ones(shape=(s,)), True)*poly1d(-1*ones(shape=(f,)), True) def poly_to_jacobi(x):", "= theta*n*(n+a+b+1.0)/(a+b+2.0)*tscale return exp(-l*t)*pc def truncate_unnecessary_jacobi(p): p_normalized = p /", "(times is None): times = arange(0,tmax,dt) y = zeros(shape=times.shape, dtype=float)", "= 
zeros(shape=times.shape, dtype=float) y[0] = y0 for i in range(1,y.shape[0]):", "= 0.01 t, y = random_walk(0.35*2-1, tmax, rw_dt) solution_as_x =", "= propagate_jacobi(result[i-1,:], t[i]-t[i-1]) return result def transform_to_x(pdf, x): result =", "times[i])*dt if abs(y[i]) > 1: y[i] = y[i] / abs(y[i])", "jacobi_to_poly_no_invariant(j)) empirical_ctr[i] = success / float(nsamples) solution_as_jacobi = pde_solve(prior, times[i*nt:(i+1)*nt])", "return result def jacobi_to_poly_no_invariant(x): result = poly1d([0]) for i in", "jacobi a = 0.0 b = 3.0 theta=1.0 sigma=sqrt(theta/(2*(a+b+2))) tscale", "def pde_solve(prior, t): result = zeros(shape=(t.shape[0], prior.shape[0]), dtype=float) result[0,:] =", "for i in range(1,t.shape[0]): result[i,:] = propagate_jacobi(result[i-1,:], t[i]-t[i-1]) return result", "dtype=float) for i in range(0, pdf.shape[0]): p = jacobi_to_poly(pdf[i,:]) result[i,:]", "0.01 t, y = random_walk(0.35*2-1, tmax, rw_dt) solution_as_x = zeros(shape=(times.size,", "zeros(shape=(times.size, x.size), dtype=float) solution_as_jacobi = None empirical_ctr = zeros(shape=(4,), dtype=float)", "p(x) result[i,:] /= result[i,:].sum() return result tmax = 4 prior", "rw_dt = 0.01 t, y = random_walk(0.35*2-1, tmax, rw_dt) solution_as_x", "str(ctr) success = (r < ctr).sum() print \"Empirical: \" +", "plot(t, (y+1)/2.0, 'k') imshow(solution_as_x.transpose(), origin='lower', extent=[0,tmax,0,1]) xlabel(\"time\") ylabel(\"CTR\") title(\"Bayesian Estimate", "0.05 invariant_distribution = poly1d( [-1 for x in range(int(a))], True)*poly1d(", "in range(0, pdf.shape[0]): p = jacobi_to_poly(pdf[i,:]) result[i,:] = p(x) result[i,:]", "i in range(0, pdf.shape[0]): p = jacobi_to_poly(pdf[i,:]) result[i,:] = p(x)", "= transform_to_x(solution_as_jacobi, x) plot(arange(0,4), empirical_ctr, 'go') plot(t, (y+1)/2.0, 'k') imshow(solution_as_x.transpose(),", "times[i*nt:(i+1)*nt]) solution_as_x[i*nt:(i+1)*nt] = transform_to_x(solution_as_jacobi, x) plot(arange(0,4), empirical_ctr, 
'go') plot(t, (y+1)/2.0,", "\"\"\"x is a poly1d object\"\"\" xc = x.coeffs N =", "float(nsamples)) evidence = beta_prior( nsamples - success, success) prior =", "random_walk(y0, tmax, dt, times = None): dt = dt *", "nsamples - success, success) prior = None j = truncate_unnecessary_jacobi(solution_as_jacobi[int(1/dt)-1])", "in range(0,4): nt = int(1.0/dt) prior = prior_in_jacobi rnd =", "1e-4)] def pde_solve(prior, t): result = zeros(shape=(t.shape[0], prior.shape[0]), dtype=float) result[0,:]", "'go') plot(t, (y+1)/2.0, 'k') imshow(solution_as_x.transpose(), origin='lower', extent=[0,tmax,0,1]) xlabel(\"time\") ylabel(\"CTR\") title(\"Bayesian", "from scipy.stats import norm, uniform, beta from scipy.special import jacobi", "return theta*n*(n+a+b+1)/(a+b+2) gaussian_var = norm() def dW(dt): return norm.rvs() /", "i in range(x.shape[0]): result = result + jacobi(i,a,b)*x[i] return result", "[-1 for x in range(int(a))], True)*poly1d( [1 for x in", "y[i-1] + rhs(y[i-1], times[i])*dt if abs(y[i]) > 1: y[i] =", "4 prior = beta_prior(40, 20) prior_in_jacobi = poly_to_jacobi(prior) dt =", "jacobi(i,a,b)*x[i] return result def propagate_jacobi(pc, t): \"\"\"Takes jacobi coefficients and", "dtype=float) for i in range(0,4): nt = int(1.0/dt) prior =", "= result + (jacobi(i,a,b)*invariant_distribution)*x[i] return result def jacobi_to_poly_no_invariant(x): result =", "x): result = zeros(shape=(pdf.shape[0], x.shape[0]), dtype=float) for i in range(0,", "truncate_unnecessary_jacobi(solution_as_jacobi[int(1/dt)-1]) prior = poly_to_jacobi(evidence * jacobi_to_poly_no_invariant(j)) empirical_ctr[i] = success /", "l = theta*n*(n+a+b+1.0)/(a+b+2.0)*tscale return exp(-l*t)*pc def truncate_unnecessary_jacobi(p): p_normalized = p", "= 40 r = rnd.rvs(nsamples) ctr = (y[i/rw_dt]+1)/2.0 print \"CTR:", "True) def poly_to_jacobi(x): \"\"\"x is a poly1d object\"\"\" xc =", "N = x.order+1 matrix = zeros(shape=(N,N), dtype=float) for i in", "solve from scipy.integrate import odeint from 
scipy.stats import norm, uniform,", "/ (abs(p).sum()) cs = cumsum(abs(p_normalized[::-1]))[::-1] return p_normalized[where(abs(cs) > 1e-4)] def", "propagate_jacobi(result[i-1,:], t[i]-t[i-1]) return result def transform_to_x(pdf, x): result = zeros(shape=(pdf.shape[0],", "0.0 b = 3.0 theta=1.0 sigma=sqrt(theta/(2*(a+b+2))) tscale = 0.05 invariant_distribution", "+ jacobi(i,a,b)*x[i] return result def propagate_jacobi(pc, t): \"\"\"Takes jacobi coefficients", "i in range(N): matrix[N-i-1:N, i] = jacobi(i,a,b).coeffs return solve(matrix, xc)", "float(nsamples) solution_as_jacobi = pde_solve(prior, times[i*nt:(i+1)*nt]) solution_as_x[i*nt:(i+1)*nt] = transform_to_x(solution_as_jacobi, x) plot(arange(0,4),", "prior for i in range(1,t.shape[0]): result[i,:] = propagate_jacobi(result[i-1,:], t[i]-t[i-1]) return", "uniform(0,1) if (i > 0): nsamples = 40 r =", "def truncate_unnecessary_jacobi(p): p_normalized = p / (abs(p).sum()) cs = cumsum(abs(p_normalized[::-1]))[::-1]", "def poly_to_jacobi(x): \"\"\"x is a poly1d object\"\"\" xc = x.coeffs", "= jacobi(i,a,b).coeffs return solve(matrix, xc) def jacobi_to_poly(x): result = poly1d([0])", "print \"Empirical: \" + str(success / float(nsamples)) evidence = beta_prior(", "'k') imshow(solution_as_x.transpose(), origin='lower', extent=[0,tmax,0,1]) xlabel(\"time\") ylabel(\"CTR\") title(\"Bayesian Estimate of CTR\")", "transform_to_x(solution_as_jacobi, x) plot(arange(0,4), empirical_ctr, 'go') plot(t, (y+1)/2.0, 'k') imshow(solution_as_x.transpose(), origin='lower',", "= arange(-1,1,0.01) rw_dt = 0.01 t, y = random_walk(0.35*2-1, tmax,", "= rnd.rvs(nsamples) ctr = (y[i/rw_dt]+1)/2.0 print \"CTR: \" + str(ctr)", "import * from numpy.linalg import solve from scipy.integrate import odeint", "import norm, uniform, beta from scipy.special import jacobi a =", "range(x.shape[0]): result = result + jacobi(i,a,b)*x[i] return result def propagate_jacobi(pc,", "arange(pc.shape[0], dtype=float) l = theta*n*(n+a+b+1.0)/(a+b+2.0)*tscale 
return exp(-l*t)*pc def truncate_unnecessary_jacobi(p): p_normalized", "xc) def jacobi_to_poly(x): result = poly1d([0]) for i in range(x.shape[0]):", "y[0] = y0 for i in range(1,y.shape[0]): y[i] = y[i-1]", "t, y = random_walk(0.35*2-1, tmax, rw_dt) solution_as_x = zeros(shape=(times.size, x.size),", "dW(dt): return norm.rvs() / sqrt(dt) def random_walk(y0, tmax, dt, times", "gaussian_var = norm() def dW(dt): return norm.rvs() / sqrt(dt) def", "= zeros(shape=(4,), dtype=float) for i in range(0,4): nt = int(1.0/dt)", "sqrt(2*theta*(1-y*y)/(a+b+2))*dW(dt/tscale) if (times is None): times = arange(0,tmax,dt) y =", "/= result[i,:].sum() return result tmax = 4 prior = beta_prior(40,", "def rhs(y,t): return -theta*(y-(a-b)/(a+b+2)) + sqrt(2*theta*(1-y*y)/(a+b+2))*dW(dt/tscale) if (times is None):", "return p_normalized[where(abs(cs) > 1e-4)] def pde_solve(prior, t): result = zeros(shape=(t.shape[0],", "def jacobi_to_poly_no_invariant(x): result = poly1d([0]) for i in range(x.shape[0]): result", "= 0.05 invariant_distribution = poly1d( [-1 for x in range(int(a))],", "poly1d([0]) for i in range(x.shape[0]): result = result + (jacobi(i,a,b)*invariant_distribution)*x[i]", "= jacobi_to_poly(pdf[i,:]) result[i,:] = p(x) result[i,:] /= result[i,:].sum() return result", "< ctr).sum() print \"Empirical: \" + str(success / float(nsamples)) evidence", "i in range(0,4): nt = int(1.0/dt) prior = prior_in_jacobi rnd", "rnd.rvs(nsamples) ctr = (y[i/rw_dt]+1)/2.0 print \"CTR: \" + str(ctr) success", "from numpy.linalg import solve from scipy.integrate import odeint from scipy.stats", "result tmax = 4 prior = beta_prior(40, 20) prior_in_jacobi =", "exp(-l*t)*pc def truncate_unnecessary_jacobi(p): p_normalized = p / (abs(p).sum()) cs =", "(abs(p).sum()) cs = cumsum(abs(p_normalized[::-1]))[::-1] return p_normalized[where(abs(cs) > 1e-4)] def pde_solve(prior,", "str(success / float(nsamples)) evidence = beta_prior( nsamples - success, success)", "beta_prior(s, f): return 
poly1d(ones(shape=(s,)), True)*poly1d(-1*ones(shape=(f,)), True) def poly_to_jacobi(x): \"\"\"x is", "norm.rvs() / sqrt(dt) def random_walk(y0, tmax, dt, times = None):", "success) prior = None j = truncate_unnecessary_jacobi(solution_as_jacobi[int(1/dt)-1]) prior = poly_to_jacobi(evidence", "range(1,t.shape[0]): result[i,:] = propagate_jacobi(result[i-1,:], t[i]-t[i-1]) return result def transform_to_x(pdf, x):", "tmax = 4 prior = beta_prior(40, 20) prior_in_jacobi = poly_to_jacobi(prior)", "tmax, rw_dt) solution_as_x = zeros(shape=(times.size, x.size), dtype=float) solution_as_jacobi = None", "beta from scipy.special import jacobi a = 0.0 b =", "result def propagate_jacobi(pc, t): \"\"\"Takes jacobi coefficients and propagates them\"\"\"", "prior_in_jacobi = poly_to_jacobi(prior) dt = 0.1 times = arange(0,tmax,dt) x", "y[i] = y[i] / abs(y[i]) return (times, y) def beta_prior(s,", "result = result + jacobi(i,a,b)*x[i] return result def propagate_jacobi(pc, t):", "result[i,:] = propagate_jacobi(result[i-1,:], t[i]-t[i-1]) return result def transform_to_x(pdf, x): result", "= random_walk(0.35*2-1, tmax, rw_dt) solution_as_x = zeros(shape=(times.size, x.size), dtype=float) solution_as_jacobi", "abs(y[i]) return (times, y) def beta_prior(s, f): return poly1d(ones(shape=(s,)), True)*poly1d(-1*ones(shape=(f,)),", "/ sqrt(dt) def random_walk(y0, tmax, dt, times = None): dt", "(y+1)/2.0, 'k') imshow(solution_as_x.transpose(), origin='lower', extent=[0,tmax,0,1]) xlabel(\"time\") ylabel(\"CTR\") title(\"Bayesian Estimate of", "evidence = beta_prior( nsamples - success, success) prior = None", "empirical_ctr[i] = success / float(nsamples) solution_as_jacobi = pde_solve(prior, times[i*nt:(i+1)*nt]) solution_as_x[i*nt:(i+1)*nt]", "matrix[N-i-1:N, i] = jacobi(i,a,b).coeffs return solve(matrix, xc) def jacobi_to_poly(x): result", "result = zeros(shape=(pdf.shape[0], x.shape[0]), dtype=float) for i in range(0, pdf.shape[0]):", "rhs(y,t): return -theta*(y-(a-b)/(a+b+2)) + 
sqrt(2*theta*(1-y*y)/(a+b+2))*dW(dt/tscale) if (times is None): times", "return poly1d(ones(shape=(s,)), True)*poly1d(-1*ones(shape=(f,)), True) def poly_to_jacobi(x): \"\"\"x is a poly1d", "for i in range(x.shape[0]): result = result + jacobi(i,a,b)*x[i] return", "return norm.rvs() / sqrt(dt) def random_walk(y0, tmax, dt, times =", "(i > 0): nsamples = 40 r = rnd.rvs(nsamples) ctr", "def transform_to_x(pdf, x): result = zeros(shape=(pdf.shape[0], x.shape[0]), dtype=float) for i", "= prior for i in range(1,t.shape[0]): result[i,:] = propagate_jacobi(result[i-1,:], t[i]-t[i-1])", "(times, y) def beta_prior(s, f): return poly1d(ones(shape=(s,)), True)*poly1d(-1*ones(shape=(f,)), True) def", "= truncate_unnecessary_jacobi(solution_as_jacobi[int(1/dt)-1]) prior = poly_to_jacobi(evidence * jacobi_to_poly_no_invariant(j)) empirical_ctr[i] = success", "x in range(int(b))], True) def eigenvalue(n): return theta*n*(n+a+b+1)/(a+b+2) gaussian_var =", "prior = poly_to_jacobi(evidence * jacobi_to_poly_no_invariant(j)) empirical_ctr[i] = success / float(nsamples)", "tmax, dt, times = None): dt = dt * tscale", "/ float(nsamples) solution_as_jacobi = pde_solve(prior, times[i*nt:(i+1)*nt]) solution_as_x[i*nt:(i+1)*nt] = transform_to_x(solution_as_jacobi, x)", "True) def eigenvalue(n): return theta*n*(n+a+b+1)/(a+b+2) gaussian_var = norm() def dW(dt):", "import jacobi a = 0.0 b = 3.0 theta=1.0 sigma=sqrt(theta/(2*(a+b+2)))", "int(1.0/dt) prior = prior_in_jacobi rnd = uniform(0,1) if (i >", "tscale def rhs(y,t): return -theta*(y-(a-b)/(a+b+2)) + sqrt(2*theta*(1-y*y)/(a+b+2))*dW(dt/tscale) if (times is", "return result def propagate_jacobi(pc, t): \"\"\"Takes jacobi coefficients and propagates", "= zeros(shape=(times.size, x.size), dtype=float) solution_as_jacobi = None empirical_ctr = zeros(shape=(4,),", "\"CTR: \" + str(ctr) success = (r < ctr).sum() print", "import odeint from scipy.stats import norm, uniform, beta from scipy.special", "result def transform_to_x(pdf, x): result = 
zeros(shape=(pdf.shape[0], x.shape[0]), dtype=float) for", "pde_solve(prior, t): result = zeros(shape=(t.shape[0], prior.shape[0]), dtype=float) result[0,:] = prior", "zeros(shape=times.shape, dtype=float) y[0] = y0 for i in range(1,y.shape[0]): y[i]", "x.shape[0]), dtype=float) for i in range(0, pdf.shape[0]): p = jacobi_to_poly(pdf[i,:])", "p / (abs(p).sum()) cs = cumsum(abs(p_normalized[::-1]))[::-1] return p_normalized[where(abs(cs) > 1e-4)]", "for i in range(0, pdf.shape[0]): p = jacobi_to_poly(pdf[i,:]) result[i,:] =", "+ sqrt(2*theta*(1-y*y)/(a+b+2))*dW(dt/tscale) if (times is None): times = arange(0,tmax,dt) y", "for i in range(N): matrix[N-i-1:N, i] = jacobi(i,a,b).coeffs return solve(matrix,", "beta_prior(40, 20) prior_in_jacobi = poly_to_jacobi(prior) dt = 0.1 times =", "j = truncate_unnecessary_jacobi(solution_as_jacobi[int(1/dt)-1]) prior = poly_to_jacobi(evidence * jacobi_to_poly_no_invariant(j)) empirical_ctr[i] =", "return (times, y) def beta_prior(s, f): return poly1d(ones(shape=(s,)), True)*poly1d(-1*ones(shape=(f,)), True)", "+ str(ctr) success = (r < ctr).sum() print \"Empirical: \"", "if abs(y[i]) > 1: y[i] = y[i] / abs(y[i]) return", "rw_dt) solution_as_x = zeros(shape=(times.size, x.size), dtype=float) solution_as_jacobi = None empirical_ctr", "result def jacobi_to_poly_no_invariant(x): result = poly1d([0]) for i in range(x.shape[0]):", "/ float(nsamples)) evidence = beta_prior( nsamples - success, success) prior", "= beta_prior( nsamples - success, success) prior = None j", "zeros(shape=(pdf.shape[0], x.shape[0]), dtype=float) for i in range(0, pdf.shape[0]): p =", "= norm() def dW(dt): return norm.rvs() / sqrt(dt) def random_walk(y0,", "def dW(dt): return norm.rvs() / sqrt(dt) def random_walk(y0, tmax, dt,", "in range(int(a))], True)*poly1d( [1 for x in range(int(b))], True) def", "times = arange(0,tmax,dt) x = arange(-1,1,0.01) rw_dt = 0.01 t,", "ctr).sum() print \"Empirical: \" + str(success / float(nsamples)) evidence =", "times = None): 
dt = dt * tscale def rhs(y,t):", "= y[i] / abs(y[i]) return (times, y) def beta_prior(s, f):", "= prior_in_jacobi rnd = uniform(0,1) if (i > 0): nsamples", "(jacobi(i,a,b)*invariant_distribution)*x[i] return result def jacobi_to_poly_no_invariant(x): result = poly1d([0]) for i", "= beta_prior(40, 20) prior_in_jacobi = poly_to_jacobi(prior) dt = 0.1 times", "arange(0,tmax,dt) x = arange(-1,1,0.01) rw_dt = 0.01 t, y =", "invariant_distribution = poly1d( [-1 for x in range(int(a))], True)*poly1d( [1", "imshow(solution_as_x.transpose(), origin='lower', extent=[0,tmax,0,1]) xlabel(\"time\") ylabel(\"CTR\") title(\"Bayesian Estimate of CTR\") colorbar()", "dtype=float) result[0,:] = prior for i in range(1,t.shape[0]): result[i,:] =", "1: y[i] = y[i] / abs(y[i]) return (times, y) def", "result[i,:].sum() return result tmax = 4 prior = beta_prior(40, 20)", "poly1d(ones(shape=(s,)), True)*poly1d(-1*ones(shape=(f,)), True) def poly_to_jacobi(x): \"\"\"x is a poly1d object\"\"\"", "def jacobi_to_poly(x): result = poly1d([0]) for i in range(x.shape[0]): result", "empirical_ctr = zeros(shape=(4,), dtype=float) for i in range(0,4): nt =", "x.coeffs N = x.order+1 matrix = zeros(shape=(N,N), dtype=float) for i", "[1 for x in range(int(b))], True) def eigenvalue(n): return theta*n*(n+a+b+1)/(a+b+2)", "= uniform(0,1) if (i > 0): nsamples = 40 r", "prior = beta_prior(40, 20) prior_in_jacobi = poly_to_jacobi(prior) dt = 0.1", "propagate_jacobi(pc, t): \"\"\"Takes jacobi coefficients and propagates them\"\"\" n =", "pde_solve(prior, times[i*nt:(i+1)*nt]) solution_as_x[i*nt:(i+1)*nt] = transform_to_x(solution_as_jacobi, x) plot(arange(0,4), empirical_ctr, 'go') plot(t,", "jacobi coefficients and propagates them\"\"\" n = arange(pc.shape[0], dtype=float) l", "range(int(a))], True)*poly1d( [1 for x in range(int(b))], True) def eigenvalue(n):", "- success, success) prior = None j = truncate_unnecessary_jacobi(solution_as_jacobi[int(1/dt)-1]) prior", "dtype=float) solution_as_jacobi = None 
empirical_ctr = zeros(shape=(4,), dtype=float) for i", "i] = jacobi(i,a,b).coeffs return solve(matrix, xc) def jacobi_to_poly(x): result =", "scipy.integrate import odeint from scipy.stats import norm, uniform, beta from", "empirical_ctr, 'go') plot(t, (y+1)/2.0, 'k') imshow(solution_as_x.transpose(), origin='lower', extent=[0,tmax,0,1]) xlabel(\"time\") ylabel(\"CTR\")", "def beta_prior(s, f): return poly1d(ones(shape=(s,)), True)*poly1d(-1*ones(shape=(f,)), True) def poly_to_jacobi(x): \"\"\"x", "return result def transform_to_x(pdf, x): result = zeros(shape=(pdf.shape[0], x.shape[0]), dtype=float)", "result[i,:] /= result[i,:].sum() return result tmax = 4 prior =", "= (y[i/rw_dt]+1)/2.0 print \"CTR: \" + str(ctr) success = (r", "matrix = zeros(shape=(N,N), dtype=float) for i in range(N): matrix[N-i-1:N, i]", "jacobi(i,a,b).coeffs return solve(matrix, xc) def jacobi_to_poly(x): result = poly1d([0]) for", "= p(x) result[i,:] /= result[i,:].sum() return result tmax = 4", "range(int(b))], True) def eigenvalue(n): return theta*n*(n+a+b+1)/(a+b+2) gaussian_var = norm() def", "prior = None j = truncate_unnecessary_jacobi(solution_as_jacobi[int(1/dt)-1]) prior = poly_to_jacobi(evidence *", "dt = 0.1 times = arange(0,tmax,dt) x = arange(-1,1,0.01) rw_dt", "them\"\"\" n = arange(pc.shape[0], dtype=float) l = theta*n*(n+a+b+1.0)/(a+b+2.0)*tscale return exp(-l*t)*pc", "= None j = truncate_unnecessary_jacobi(solution_as_jacobi[int(1/dt)-1]) prior = poly_to_jacobi(evidence * jacobi_to_poly_no_invariant(j))", "for i in range(0,4): nt = int(1.0/dt) prior = prior_in_jacobi", "poly_to_jacobi(prior) dt = 0.1 times = arange(0,tmax,dt) x = arange(-1,1,0.01)", "is None): times = arange(0,tmax,dt) y = zeros(shape=times.shape, dtype=float) y[0]", "norm() def dW(dt): return norm.rvs() / sqrt(dt) def random_walk(y0, tmax,", "dt * tscale def rhs(y,t): return -theta*(y-(a-b)/(a+b+2)) + sqrt(2*theta*(1-y*y)/(a+b+2))*dW(dt/tscale) if", "i in range(x.shape[0]): result = result + 
(jacobi(i,a,b)*invariant_distribution)*x[i] return result", "arange(-1,1,0.01) rw_dt = 0.01 t, y = random_walk(0.35*2-1, tmax, rw_dt)", "* tscale def rhs(y,t): return -theta*(y-(a-b)/(a+b+2)) + sqrt(2*theta*(1-y*y)/(a+b+2))*dW(dt/tscale) if (times", "plot(arange(0,4), empirical_ctr, 'go') plot(t, (y+1)/2.0, 'k') imshow(solution_as_x.transpose(), origin='lower', extent=[0,tmax,0,1]) xlabel(\"time\")", "coefficients and propagates them\"\"\" n = arange(pc.shape[0], dtype=float) l =", "p_normalized[where(abs(cs) > 1e-4)] def pde_solve(prior, t): result = zeros(shape=(t.shape[0], prior.shape[0]),", "rnd = uniform(0,1) if (i > 0): nsamples = 40", "> 0): nsamples = 40 r = rnd.rvs(nsamples) ctr =", "* from numpy import * from numpy.linalg import solve from", "dt = dt * tscale def rhs(y,t): return -theta*(y-(a-b)/(a+b+2)) +", "import * from numpy import * from numpy.linalg import solve", "def random_walk(y0, tmax, dt, times = None): dt = dt", "\" + str(ctr) success = (r < ctr).sum() print \"Empirical:", "prior.shape[0]), dtype=float) result[0,:] = prior for i in range(1,t.shape[0]): result[i,:]", "None j = truncate_unnecessary_jacobi(solution_as_jacobi[int(1/dt)-1]) prior = poly_to_jacobi(evidence * jacobi_to_poly_no_invariant(j)) empirical_ctr[i]", "solution_as_x[i*nt:(i+1)*nt] = transform_to_x(solution_as_jacobi, x) plot(arange(0,4), empirical_ctr, 'go') plot(t, (y+1)/2.0, 'k')", "range(N): matrix[N-i-1:N, i] = jacobi(i,a,b).coeffs return solve(matrix, xc) def jacobi_to_poly(x):", "result + (jacobi(i,a,b)*invariant_distribution)*x[i] return result def jacobi_to_poly_no_invariant(x): result = poly1d([0])", "result[i,:] = p(x) result[i,:] /= result[i,:].sum() return result tmax =", "None): dt = dt * tscale def rhs(y,t): return -theta*(y-(a-b)/(a+b+2))", "success / float(nsamples) solution_as_jacobi = pde_solve(prior, times[i*nt:(i+1)*nt]) solution_as_x[i*nt:(i+1)*nt] = transform_to_x(solution_as_jacobi,", "jacobi_to_poly_no_invariant(x): result = poly1d([0]) for i in 
range(x.shape[0]): result =", "y[i] = y[i-1] + rhs(y[i-1], times[i])*dt if abs(y[i]) > 1:", "eigenvalue(n): return theta*n*(n+a+b+1)/(a+b+2) gaussian_var = norm() def dW(dt): return norm.rvs()", "result = result + (jacobi(i,a,b)*invariant_distribution)*x[i] return result def jacobi_to_poly_no_invariant(x): result", "tscale = 0.05 invariant_distribution = poly1d( [-1 for x in", "poly1d object\"\"\" xc = x.coeffs N = x.order+1 matrix =", "from scipy.integrate import odeint from scipy.stats import norm, uniform, beta", "poly1d( [-1 for x in range(int(a))], True)*poly1d( [1 for x", "= (r < ctr).sum() print \"Empirical: \" + str(success /", "= y[i-1] + rhs(y[i-1], times[i])*dt if abs(y[i]) > 1: y[i]", "success, success) prior = None j = truncate_unnecessary_jacobi(solution_as_jacobi[int(1/dt)-1]) prior =", "40 r = rnd.rvs(nsamples) ctr = (y[i/rw_dt]+1)/2.0 print \"CTR: \"", "pdf.shape[0]): p = jacobi_to_poly(pdf[i,:]) result[i,:] = p(x) result[i,:] /= result[i,:].sum()", "p_normalized = p / (abs(p).sum()) cs = cumsum(abs(p_normalized[::-1]))[::-1] return p_normalized[where(abs(cs)", "prior = prior_in_jacobi rnd = uniform(0,1) if (i > 0):", "numpy import * from numpy.linalg import solve from scipy.integrate import", "(r < ctr).sum() print \"Empirical: \" + str(success / float(nsamples))", "= 3.0 theta=1.0 sigma=sqrt(theta/(2*(a+b+2))) tscale = 0.05 invariant_distribution = poly1d(", "b = 3.0 theta=1.0 sigma=sqrt(theta/(2*(a+b+2))) tscale = 0.05 invariant_distribution =", "\" + str(success / float(nsamples)) evidence = beta_prior( nsamples -", "y0 for i in range(1,y.shape[0]): y[i] = y[i-1] + rhs(y[i-1],", "zeros(shape=(N,N), dtype=float) for i in range(N): matrix[N-i-1:N, i] = jacobi(i,a,b).coeffs", "in range(int(b))], True) def eigenvalue(n): return theta*n*(n+a+b+1)/(a+b+2) gaussian_var = norm()", "t[i]-t[i-1]) return result def transform_to_x(pdf, x): result = zeros(shape=(pdf.shape[0], x.shape[0]),", "= None empirical_ctr = zeros(shape=(4,), dtype=float) for i in 
range(0,4):", "jacobi_to_poly(pdf[i,:]) result[i,:] = p(x) result[i,:] /= result[i,:].sum() return result tmax", "None empirical_ctr = zeros(shape=(4,), dtype=float) for i in range(0,4): nt", "= dt * tscale def rhs(y,t): return -theta*(y-(a-b)/(a+b+2)) + sqrt(2*theta*(1-y*y)/(a+b+2))*dW(dt/tscale)", "= result + jacobi(i,a,b)*x[i] return result def propagate_jacobi(pc, t): \"\"\"Takes", "in range(1,t.shape[0]): result[i,:] = propagate_jacobi(result[i-1,:], t[i]-t[i-1]) return result def transform_to_x(pdf,", "f): return poly1d(ones(shape=(s,)), True)*poly1d(-1*ones(shape=(f,)), True) def poly_to_jacobi(x): \"\"\"x is a", "= poly1d( [-1 for x in range(int(a))], True)*poly1d( [1 for", "= arange(pc.shape[0], dtype=float) l = theta*n*(n+a+b+1.0)/(a+b+2.0)*tscale return exp(-l*t)*pc def truncate_unnecessary_jacobi(p):", "y = random_walk(0.35*2-1, tmax, rw_dt) solution_as_x = zeros(shape=(times.size, x.size), dtype=float)", "truncate_unnecessary_jacobi(p): p_normalized = p / (abs(p).sum()) cs = cumsum(abs(p_normalized[::-1]))[::-1] return", "solution_as_x = zeros(shape=(times.size, x.size), dtype=float) solution_as_jacobi = None empirical_ctr =", "\"\"\"Takes jacobi coefficients and propagates them\"\"\" n = arange(pc.shape[0], dtype=float)", "in range(1,y.shape[0]): y[i] = y[i-1] + rhs(y[i-1], times[i])*dt if abs(y[i])", "result = zeros(shape=(t.shape[0], prior.shape[0]), dtype=float) result[0,:] = prior for i", "= x.order+1 matrix = zeros(shape=(N,N), dtype=float) for i in range(N):", "beta_prior( nsamples - success, success) prior = None j =", "in range(x.shape[0]): result = result + (jacobi(i,a,b)*invariant_distribution)*x[i] return result def", "import solve from scipy.integrate import odeint from scipy.stats import norm,", "= pde_solve(prior, times[i*nt:(i+1)*nt]) solution_as_x[i*nt:(i+1)*nt] = transform_to_x(solution_as_jacobi, x) plot(arange(0,4), empirical_ctr, 'go')", "dtype=float) for i in range(N): matrix[N-i-1:N, i] = jacobi(i,a,b).coeffs return", "print 
\"CTR: \" + str(ctr) success = (r < ctr).sum()", "+ (jacobi(i,a,b)*invariant_distribution)*x[i] return result def jacobi_to_poly_no_invariant(x): result = poly1d([0]) for", "propagates them\"\"\" n = arange(pc.shape[0], dtype=float) l = theta*n*(n+a+b+1.0)/(a+b+2.0)*tscale return", "cumsum(abs(p_normalized[::-1]))[::-1] return p_normalized[where(abs(cs) > 1e-4)] def pde_solve(prior, t): result =", "from pylab import * from numpy import * from numpy.linalg", "from numpy import * from numpy.linalg import solve from scipy.integrate", "range(1,y.shape[0]): y[i] = y[i-1] + rhs(y[i-1], times[i])*dt if abs(y[i]) >", "if (times is None): times = arange(0,tmax,dt) y = zeros(shape=times.shape,", "sigma=sqrt(theta/(2*(a+b+2))) tscale = 0.05 invariant_distribution = poly1d( [-1 for x", "range(0, pdf.shape[0]): p = jacobi_to_poly(pdf[i,:]) result[i,:] = p(x) result[i,:] /=", "= arange(0,tmax,dt) x = arange(-1,1,0.01) rw_dt = 0.01 t, y", "in range(x.shape[0]): result = result + jacobi(i,a,b)*x[i] return result def", "solution_as_jacobi = None empirical_ctr = zeros(shape=(4,), dtype=float) for i in", "norm, uniform, beta from scipy.special import jacobi a = 0.0", "= arange(0,tmax,dt) y = zeros(shape=times.shape, dtype=float) y[0] = y0 for", "result = poly1d([0]) for i in range(x.shape[0]): result = result", "return solve(matrix, xc) def jacobi_to_poly(x): result = poly1d([0]) for i", "transform_to_x(pdf, x): result = zeros(shape=(pdf.shape[0], x.shape[0]), dtype=float) for i in", "rhs(y[i-1], times[i])*dt if abs(y[i]) > 1: y[i] = y[i] /", "abs(y[i]) > 1: y[i] = y[i] / abs(y[i]) return (times,", "poly_to_jacobi(x): \"\"\"x is a poly1d object\"\"\" xc = x.coeffs N", "cs = cumsum(abs(p_normalized[::-1]))[::-1] return p_normalized[where(abs(cs) > 1e-4)] def pde_solve(prior, t):", "x) plot(arange(0,4), empirical_ctr, 'go') plot(t, (y+1)/2.0, 'k') imshow(solution_as_x.transpose(), origin='lower', extent=[0,tmax,0,1])", "t): result = zeros(shape=(t.shape[0], prior.shape[0]), 
dtype=float) result[0,:] = prior for", "scipy.special import jacobi a = 0.0 b = 3.0 theta=1.0", "x.size), dtype=float) solution_as_jacobi = None empirical_ctr = zeros(shape=(4,), dtype=float) for", "scipy.stats import norm, uniform, beta from scipy.special import jacobi a", "y = zeros(shape=times.shape, dtype=float) y[0] = y0 for i in", "= x.coeffs N = x.order+1 matrix = zeros(shape=(N,N), dtype=float) for", "t): \"\"\"Takes jacobi coefficients and propagates them\"\"\" n = arange(pc.shape[0],", "def eigenvalue(n): return theta*n*(n+a+b+1)/(a+b+2) gaussian_var = norm() def dW(dt): return", "poly_to_jacobi(evidence * jacobi_to_poly_no_invariant(j)) empirical_ctr[i] = success / float(nsamples) solution_as_jacobi =", "uniform, beta from scipy.special import jacobi a = 0.0 b", "(y[i/rw_dt]+1)/2.0 print \"CTR: \" + str(ctr) success = (r <", "theta=1.0 sigma=sqrt(theta/(2*(a+b+2))) tscale = 0.05 invariant_distribution = poly1d( [-1 for", "True)*poly1d( [1 for x in range(int(b))], True) def eigenvalue(n): return", "y[i] / abs(y[i]) return (times, y) def beta_prior(s, f): return", "x in range(int(a))], True)*poly1d( [1 for x in range(int(b))], True)", "\"Empirical: \" + str(success / float(nsamples)) evidence = beta_prior( nsamples", "prior_in_jacobi rnd = uniform(0,1) if (i > 0): nsamples =", "* from numpy.linalg import solve from scipy.integrate import odeint from", "= zeros(shape=(t.shape[0], prior.shape[0]), dtype=float) result[0,:] = prior for i in", "= 4 prior = beta_prior(40, 20) prior_in_jacobi = poly_to_jacobi(prior) dt", "-theta*(y-(a-b)/(a+b+2)) + sqrt(2*theta*(1-y*y)/(a+b+2))*dW(dt/tscale) if (times is None): times = arange(0,tmax,dt)", "= success / float(nsamples) solution_as_jacobi = pde_solve(prior, times[i*nt:(i+1)*nt]) solution_as_x[i*nt:(i+1)*nt] =", "3.0 theta=1.0 sigma=sqrt(theta/(2*(a+b+2))) tscale = 0.05 invariant_distribution = poly1d( [-1", "if (i > 0): nsamples = 40 r = rnd.rvs(nsamples)", "theta*n*(n+a+b+1)/(a+b+2) gaussian_var = norm() def 
dW(dt): return norm.rvs() / sqrt(dt)", "= p / (abs(p).sum()) cs = cumsum(abs(p_normalized[::-1]))[::-1] return p_normalized[where(abs(cs) >", "= zeros(shape=(N,N), dtype=float) for i in range(N): matrix[N-i-1:N, i] =", "* jacobi_to_poly_no_invariant(j)) empirical_ctr[i] = success / float(nsamples) solution_as_jacobi = pde_solve(prior,", "is a poly1d object\"\"\" xc = x.coeffs N = x.order+1", "return exp(-l*t)*pc def truncate_unnecessary_jacobi(p): p_normalized = p / (abs(p).sum()) cs", "= y0 for i in range(1,y.shape[0]): y[i] = y[i-1] +", "zeros(shape=(4,), dtype=float) for i in range(0,4): nt = int(1.0/dt) prior", "i in range(1,t.shape[0]): result[i,:] = propagate_jacobi(result[i-1,:], t[i]-t[i-1]) return result def", "dtype=float) y[0] = y0 for i in range(1,y.shape[0]): y[i] =", "for x in range(int(a))], True)*poly1d( [1 for x in range(int(b))],", "return result tmax = 4 prior = beta_prior(40, 20) prior_in_jacobi", "0.1 times = arange(0,tmax,dt) x = arange(-1,1,0.01) rw_dt = 0.01", "= None): dt = dt * tscale def rhs(y,t): return", "in range(N): matrix[N-i-1:N, i] = jacobi(i,a,b).coeffs return solve(matrix, xc) def", "xc = x.coeffs N = x.order+1 matrix = zeros(shape=(N,N), dtype=float)", "= poly_to_jacobi(evidence * jacobi_to_poly_no_invariant(j)) empirical_ctr[i] = success / float(nsamples) solution_as_jacobi", "= cumsum(abs(p_normalized[::-1]))[::-1] return p_normalized[where(abs(cs) > 1e-4)] def pde_solve(prior, t): result", "result[0,:] = prior for i in range(1,t.shape[0]): result[i,:] = propagate_jacobi(result[i-1,:],", "0): nsamples = 40 r = rnd.rvs(nsamples) ctr = (y[i/rw_dt]+1)/2.0", "> 1e-4)] def pde_solve(prior, t): result = zeros(shape=(t.shape[0], prior.shape[0]), dtype=float)", "and propagates them\"\"\" n = arange(pc.shape[0], dtype=float) l = theta*n*(n+a+b+1.0)/(a+b+2.0)*tscale", "+ rhs(y[i-1], times[i])*dt if abs(y[i]) > 1: y[i] = y[i]", "jacobi_to_poly(x): result = poly1d([0]) for i in range(x.shape[0]): result =", "times = arange(0,tmax,dt) y = 
zeros(shape=times.shape, dtype=float) y[0] = y0", "from scipy.special import jacobi a = 0.0 b = 3.0", "i in range(1,y.shape[0]): y[i] = y[i-1] + rhs(y[i-1], times[i])*dt if", "object\"\"\" xc = x.coeffs N = x.order+1 matrix = zeros(shape=(N,N),", "20) prior_in_jacobi = poly_to_jacobi(prior) dt = 0.1 times = arange(0,tmax,dt)", "= poly1d([0]) for i in range(x.shape[0]): result = result +", "range(x.shape[0]): result = result + (jacobi(i,a,b)*invariant_distribution)*x[i] return result def jacobi_to_poly_no_invariant(x):", "x.order+1 matrix = zeros(shape=(N,N), dtype=float) for i in range(N): matrix[N-i-1:N,", "x = arange(-1,1,0.01) rw_dt = 0.01 t, y = random_walk(0.35*2-1,", "arange(0,tmax,dt) y = zeros(shape=times.shape, dtype=float) y[0] = y0 for i", "+ str(success / float(nsamples)) evidence = beta_prior( nsamples - success,", "origin='lower', extent=[0,tmax,0,1]) xlabel(\"time\") ylabel(\"CTR\") title(\"Bayesian Estimate of CTR\") colorbar() show()", "odeint from scipy.stats import norm, uniform, beta from scipy.special import", "numpy.linalg import solve from scipy.integrate import odeint from scipy.stats import", "poly1d([0]) for i in range(x.shape[0]): result = result + jacobi(i,a,b)*x[i]", "theta*n*(n+a+b+1.0)/(a+b+2.0)*tscale return exp(-l*t)*pc def truncate_unnecessary_jacobi(p): p_normalized = p / (abs(p).sum())", "True)*poly1d(-1*ones(shape=(f,)), True) def poly_to_jacobi(x): \"\"\"x is a poly1d object\"\"\" xc", "for x in range(int(b))], True) def eigenvalue(n): return theta*n*(n+a+b+1)/(a+b+2) gaussian_var", "n = arange(pc.shape[0], dtype=float) l = theta*n*(n+a+b+1.0)/(a+b+2.0)*tscale return exp(-l*t)*pc def", "> 1: y[i] = y[i] / abs(y[i]) return (times, y)", "a poly1d object\"\"\" xc = x.coeffs N = x.order+1 matrix", "pylab import * from numpy import * from numpy.linalg import", "return -theta*(y-(a-b)/(a+b+2)) + sqrt(2*theta*(1-y*y)/(a+b+2))*dW(dt/tscale) if (times is None): times =", "dtype=float) l = theta*n*(n+a+b+1.0)/(a+b+2.0)*tscale return 
exp(-l*t)*pc def truncate_unnecessary_jacobi(p): p_normalized =", "a = 0.0 b = 3.0 theta=1.0 sigma=sqrt(theta/(2*(a+b+2))) tscale =", "None): times = arange(0,tmax,dt) y = zeros(shape=times.shape, dtype=float) y[0] =", "for i in range(x.shape[0]): result = result + (jacobi(i,a,b)*invariant_distribution)*x[i] return", "nsamples = 40 r = rnd.rvs(nsamples) ctr = (y[i/rw_dt]+1)/2.0 print", "r = rnd.rvs(nsamples) ctr = (y[i/rw_dt]+1)/2.0 print \"CTR: \" +" ]
[ "PasswordField('Password', validators=[DataRequired()]) class PostForm(FlaskForm): content = TextAreaField(\"What's Up?\", validators =", "if User.select().where(User.email == field.data).exists(): raise ValidationError('User with this email already", "User.select().where(User.email == field.data).exists(): raise ValidationError('User with this email already exists.')", "Length(min=8), EqualTo('<PASSWORD>', message = 'Passwords must match') ]) password2 =", "forms are not just about display, instead they are more", "(\"Username should be one word, letters, numbers and underscores only.\")", "]) password = PasswordField( 'Password', validators=[ DataRequired(), Length(min=8), EqualTo('<PASSWORD>', message", "username = StringField( 'Username', # is the label validators=[ DataRequired(),", "the label validators=[ DataRequired(), Regexp( r'^[a-zA-Z0-9_]+$', message = (\"Username should", "import FlaskForm from wtforms import StringField, PasswordField, TextAreaField from wtforms.validators", "not just about display, instead they are more of validation", "= (\"Username should be one word, letters, numbers and underscores", "should be one word, letters, numbers and underscores only.\") ),", "wtforms import StringField, PasswordField, TextAreaField from wtforms.validators import (DataRequired, Regexp,", "exists.') class RegisterForm(FlaskForm): username = StringField( 'Username', # is the", "'Password', validators=[ DataRequired(), Length(min=8), EqualTo('<PASSWORD>', message = 'Passwords must match')", "validation # wtf forms protect our site against csrf attacks", "wtf forms protect our site against csrf attacks from flask_wtf", "== field.data).exists(): raise ValidationError('User with this name already exists.') def", "label validators=[ DataRequired(), Regexp( r'^[a-zA-Z0-9_]+$', message = (\"Username should be", "only.\") ), name_exists ]) email = StringField( 'Email', validators=[ DataRequired(),", "import (DataRequired, Regexp, ValidationError, Email, Length, 
EqualTo) from models import", "'Email', validators=[ DataRequired(), Email(), email_exists ]) password = PasswordField( 'Password',", "password = PasswordField( 'Password', validators=[ DataRequired(), Length(min=8), EqualTo('<PASSWORD>', message =", "EqualTo('<PASSWORD>', message = 'Passwords must match') ]) password2 = PasswordField(", "password2 = PasswordField( '<PASSWORD>', validators=[DataRequired() ]) class LoginForm(FlaskForm): email =", "be one word, letters, numbers and underscores only.\") ), name_exists", "just about display, instead they are more of validation #", "site against csrf attacks from flask_wtf import FlaskForm from wtforms", "(DataRequired, Regexp, ValidationError, Email, Length, EqualTo) from models import User", "PasswordField( '<PASSWORD>', validators=[DataRequired() ]) class LoginForm(FlaskForm): email = StringField('Email', validators=[DataRequired(),", "email = StringField('Email', validators=[DataRequired(), Email()]) password = PasswordField('Password', validators=[DataRequired()]) class", "are more of validation # wtf forms protect our site", "= StringField( 'Username', # is the label validators=[ DataRequired(), Regexp(", "from models import User def name_exists(form, field): if User.select().where(User.username ==", "ValidationError('User with this name already exists.') def email_exists(form, field): if", "# forms are not just about display, instead they are", "this name already exists.') def email_exists(form, field): if User.select().where(User.email ==", "they are more of validation # wtf forms protect our", "DataRequired(), Length(min=8), EqualTo('<PASSWORD>', message = 'Passwords must match') ]) password2", "validators=[DataRequired(), Email()]) password = PasswordField('Password', validators=[DataRequired()]) class PostForm(FlaskForm): content =", "word, letters, numbers and underscores only.\") ), name_exists ]) email", "r'^[a-zA-Z0-9_]+$', message = (\"Username should be one word, letters, numbers", "TextAreaField from 
wtforms.validators import (DataRequired, Regexp, ValidationError, Email, Length, EqualTo)", "letters, numbers and underscores only.\") ), name_exists ]) email =", "LoginForm(FlaskForm): email = StringField('Email', validators=[DataRequired(), Email()]) password = PasswordField('Password', validators=[DataRequired()])", "field.data).exists(): raise ValidationError('User with this email already exists.') class RegisterForm(FlaskForm):", "validators=[DataRequired()]) class PostForm(FlaskForm): content = TextAreaField(\"What's Up?\", validators = [DataRequired()])", "email = StringField( 'Email', validators=[ DataRequired(), Email(), email_exists ]) password", "if User.select().where(User.username == field.data).exists(): raise ValidationError('User with this name already", "User.select().where(User.username == field.data).exists(): raise ValidationError('User with this name already exists.')", "= StringField('Email', validators=[DataRequired(), Email()]) password = PasswordField('Password', validators=[DataRequired()]) class PostForm(FlaskForm):", "class RegisterForm(FlaskForm): username = StringField( 'Username', # is the label", "forms protect our site against csrf attacks from flask_wtf import", "]) class LoginForm(FlaskForm): email = StringField('Email', validators=[DataRequired(), Email()]) password =", "must match') ]) password2 = PasswordField( '<PASSWORD>', validators=[DataRequired() ]) class", "field): if User.select().where(User.username == field.data).exists(): raise ValidationError('User with this name", "PasswordField( 'Password', validators=[ DataRequired(), Length(min=8), EqualTo('<PASSWORD>', message = 'Passwords must", "validators=[ DataRequired(), Regexp( r'^[a-zA-Z0-9_]+$', message = (\"Username should be one", "Length, EqualTo) from models import User def name_exists(form, field): if", "flask_wtf import FlaskForm from wtforms import StringField, PasswordField, TextAreaField from", "Email()]) password = PasswordField('Password', 
validators=[DataRequired()]) class PostForm(FlaskForm): content = TextAreaField(\"What's", "already exists.') def email_exists(form, field): if User.select().where(User.email == field.data).exists(): raise", "= PasswordField('Password', validators=[DataRequired()]) class PostForm(FlaskForm): content = TextAreaField(\"What's Up?\", validators", "]) email = StringField( 'Email', validators=[ DataRequired(), Email(), email_exists ])", "about display, instead they are more of validation # wtf", "PasswordField, TextAreaField from wtforms.validators import (DataRequired, Regexp, ValidationError, Email, Length,", "= PasswordField( '<PASSWORD>', validators=[DataRequired() ]) class LoginForm(FlaskForm): email = StringField('Email',", "and underscores only.\") ), name_exists ]) email = StringField( 'Email',", "are not just about display, instead they are more of", "raise ValidationError('User with this name already exists.') def email_exists(form, field):", "from wtforms.validators import (DataRequired, Regexp, ValidationError, Email, Length, EqualTo) from", "validators=[ DataRequired(), Length(min=8), EqualTo('<PASSWORD>', message = 'Passwords must match') ])", "DataRequired(), Email(), email_exists ]) password = PasswordField( 'Password', validators=[ DataRequired(),", "csrf attacks from flask_wtf import FlaskForm from wtforms import StringField,", "EqualTo) from models import User def name_exists(form, field): if User.select().where(User.username", "email already exists.') class RegisterForm(FlaskForm): username = StringField( 'Username', #", "RegisterForm(FlaskForm): username = StringField( 'Username', # is the label validators=[", "our site against csrf attacks from flask_wtf import FlaskForm from", "DataRequired(), Regexp( r'^[a-zA-Z0-9_]+$', message = (\"Username should be one word,", "one word, letters, numbers and underscores only.\") ), name_exists ])", "message = 'Passwords must match') ]) password2 = PasswordField( '<PASSWORD>',", "class LoginForm(FlaskForm): 
email = StringField('Email', validators=[DataRequired(), Email()]) password = PasswordField('Password',", "attacks from flask_wtf import FlaskForm from wtforms import StringField, PasswordField,", "from flask_wtf import FlaskForm from wtforms import StringField, PasswordField, TextAreaField", "), name_exists ]) email = StringField( 'Email', validators=[ DataRequired(), Email(),", "from wtforms import StringField, PasswordField, TextAreaField from wtforms.validators import (DataRequired,", "StringField, PasswordField, TextAreaField from wtforms.validators import (DataRequired, Regexp, ValidationError, Email,", "display, instead they are more of validation # wtf forms", "wtforms.validators import (DataRequired, Regexp, ValidationError, Email, Length, EqualTo) from models", "ValidationError, Email, Length, EqualTo) from models import User def name_exists(form,", "name_exists(form, field): if User.select().where(User.username == field.data).exists(): raise ValidationError('User with this", "protect our site against csrf attacks from flask_wtf import FlaskForm", "Regexp( r'^[a-zA-Z0-9_]+$', message = (\"Username should be one word, letters,", "User def name_exists(form, field): if User.select().where(User.username == field.data).exists(): raise ValidationError('User", "def name_exists(form, field): if User.select().where(User.username == field.data).exists(): raise ValidationError('User with", "email_exists ]) password = PasswordField( 'Password', validators=[ DataRequired(), Length(min=8), EqualTo('<PASSWORD>',", "of validation # wtf forms protect our site against csrf", "name already exists.') def email_exists(form, field): if User.select().where(User.email == field.data).exists():", "== field.data).exists(): raise ValidationError('User with this email already exists.') class", "# is the label validators=[ DataRequired(), Regexp( r'^[a-zA-Z0-9_]+$', message =", "Email(), email_exists ]) password = PasswordField( 'Password', validators=[ DataRequired(), 
Length(min=8),", "match') ]) password2 = PasswordField( '<PASSWORD>', validators=[DataRequired() ]) class LoginForm(FlaskForm):", "numbers and underscores only.\") ), name_exists ]) email = StringField(", "models import User def name_exists(form, field): if User.select().where(User.username == field.data).exists():", "is the label validators=[ DataRequired(), Regexp( r'^[a-zA-Z0-9_]+$', message = (\"Username", "underscores only.\") ), name_exists ]) email = StringField( 'Email', validators=[", "field.data).exists(): raise ValidationError('User with this name already exists.') def email_exists(form,", "message = (\"Username should be one word, letters, numbers and", "]) password2 = PasswordField( '<PASSWORD>', validators=[DataRequired() ]) class LoginForm(FlaskForm): email", "instead they are more of validation # wtf forms protect", "with this email already exists.') class RegisterForm(FlaskForm): username = StringField(", "StringField( 'Username', # is the label validators=[ DataRequired(), Regexp( r'^[a-zA-Z0-9_]+$',", "import StringField, PasswordField, TextAreaField from wtforms.validators import (DataRequired, Regexp, ValidationError,", "exists.') def email_exists(form, field): if User.select().where(User.email == field.data).exists(): raise ValidationError('User", "field): if User.select().where(User.email == field.data).exists(): raise ValidationError('User with this email", "already exists.') class RegisterForm(FlaskForm): username = StringField( 'Username', # is", "= PasswordField( 'Password', validators=[ DataRequired(), Length(min=8), EqualTo('<PASSWORD>', message = 'Passwords", "= 'Passwords must match') ]) password2 = PasswordField( '<PASSWORD>', validators=[DataRequired()", "FlaskForm from wtforms import StringField, PasswordField, TextAreaField from wtforms.validators import", "validators=[DataRequired() ]) class LoginForm(FlaskForm): email = StringField('Email', validators=[DataRequired(), Email()]) password", "against csrf attacks from flask_wtf 
import FlaskForm from wtforms import", "with this name already exists.') def email_exists(form, field): if User.select().where(User.email", "more of validation # wtf forms protect our site against", "StringField('Email', validators=[DataRequired(), Email()]) password = PasswordField('Password', validators=[DataRequired()]) class PostForm(FlaskForm): content", "'<PASSWORD>', validators=[DataRequired() ]) class LoginForm(FlaskForm): email = StringField('Email', validators=[DataRequired(), Email()])", "raise ValidationError('User with this email already exists.') class RegisterForm(FlaskForm): username", "name_exists ]) email = StringField( 'Email', validators=[ DataRequired(), Email(), email_exists", "'Passwords must match') ]) password2 = PasswordField( '<PASSWORD>', validators=[DataRequired() ])", "= StringField( 'Email', validators=[ DataRequired(), Email(), email_exists ]) password =", "password = PasswordField('Password', validators=[DataRequired()]) class PostForm(FlaskForm): content = TextAreaField(\"What's Up?\",", "# wtf forms protect our site against csrf attacks from", "Regexp, ValidationError, Email, Length, EqualTo) from models import User def", "StringField( 'Email', validators=[ DataRequired(), Email(), email_exists ]) password = PasswordField(", "import User def name_exists(form, field): if User.select().where(User.username == field.data).exists(): raise", "'Username', # is the label validators=[ DataRequired(), Regexp( r'^[a-zA-Z0-9_]+$', message", "Email, Length, EqualTo) from models import User def name_exists(form, field):", "def email_exists(form, field): if User.select().where(User.email == field.data).exists(): raise ValidationError('User with", "ValidationError('User with this email already exists.') class RegisterForm(FlaskForm): username =", "validators=[ DataRequired(), Email(), email_exists ]) password = PasswordField( 'Password', validators=[", "this email already exists.') class RegisterForm(FlaskForm): username = StringField( 
'Username',", "email_exists(form, field): if User.select().where(User.email == field.data).exists(): raise ValidationError('User with this" ]
[ "% article name_actions_file_msg = lambda: \"What is the name of", "NewLine.both) else \"\" suffix: str = \"\\n\" if spacing in", "NewLine.both) else \"\" stdout.write(\"%s%s%s\" % (prefix, msg, suffix)) def write_error(msg:", "= lambda msg: colour_msg(msg, \"blue\") success_msg = lambda msg: colour_msg(msg,", "stderr.write(\"\\n%s\\n\" % msg) welcome_msg = ( lambda: PANTAM + \"\"\"", "lambda msg, colour: fg(colour) + attr(\"bold\") + msg + attr(\"reset\")", "2 both = 3 def write_msg(msg: str, spacing: NewLine =", "attr(\"reset\") info_msg = lambda msg: colour_msg(msg, \"blue\") success_msg = lambda", "message to stdout\"\"\" prefix: str = \"\\n\" if spacing in", "\"\"\" ) name_index_file_msg = lambda: \"What is the name of", "\"\\n\" if spacing in (NewLine.after, NewLine.both) else \"\" stdout.write(\"%s%s%s\" %", "is the name of your actions folder?\" def create_actions_file_msg(second_run: bool):", "name of your actions file?\" confirm_structure_msg = ( lambda structure:", "+ \"PANTAM\" + attr(\"reset\") colour_msg = lambda msg, colour: fg(colour)", "PANTAM + \"\"\" The microframework for microservices. Let's build your", "both = 3 def write_msg(msg: str, spacing: NewLine = None)", "name of your main script?\" name_actions_folder_msg = lambda: \"What is", "colour_msg(msg, \"blue\") success_msg = lambda msg: colour_msg(msg, \"green\") error_msg =", "lambda: PANTAM + \"\"\" The microframework for microservices. Let's build", "is the name of your main script?\" name_actions_folder_msg = lambda:", "file?\" confirm_structure_msg = ( lambda structure: \"\"\"Your application will look", "action file?\" % article name_actions_file_msg = lambda: \"What is the", "message to stderr\"\"\" stderr.write(\"\\n%s\\n\" % msg) welcome_msg = ( lambda:", "application will look like this: %s Happy to proceed?\"\"\" %", "-> None: \"\"\"Write message to stderr\"\"\" stderr.write(\"\\n%s\\n\" % msg) welcome_msg", "app... 
\"\"\" ) name_index_file_msg = lambda: \"What is the name", "confirm_structure_msg = ( lambda structure: \"\"\"Your application will look like", "+ attr(\"bold\") + \"PANTAM\" + attr(\"reset\") colour_msg = lambda msg,", "want to create %s action file?\" % article name_actions_file_msg =", "second_run else \"an\" return \"Do you want to create %s", "\"\\n\" if spacing in (NewLine.before, NewLine.both) else \"\" suffix: str", "+ msg + attr(\"reset\") info_msg = lambda msg: colour_msg(msg, \"blue\")", "\"What is the name of your actions folder?\" def create_actions_file_msg(second_run:", "Let's build your app... \"\"\" ) name_index_file_msg = lambda: \"What", "lambda structure: \"\"\"Your application will look like this: %s Happy", "% (prefix, msg, suffix)) def write_error(msg: str) -> None: \"\"\"Write", "\"\"\"Write message to stderr\"\"\" stderr.write(\"\\n%s\\n\" % msg) welcome_msg = (", "stderr, stdout from enum import Enum from colored import fg,", "\"another\" if second_run else \"an\" return \"Do you want to", "name_actions_file_msg = lambda: \"What is the name of your actions", "str = \"\\n\" if spacing in (NewLine.after, NewLine.both) else \"\"", "\"blue\") success_msg = lambda msg: colour_msg(msg, \"green\") error_msg = lambda", "the name of your actions folder?\" def create_actions_file_msg(second_run: bool): \"\"\"Actions", "spacing in (NewLine.after, NewLine.both) else \"\" stdout.write(\"%s%s%s\" % (prefix, msg,", "lambda: \"What is the name of your actions folder?\" def", "article name_actions_file_msg = lambda: \"What is the name of your", "if second_run else \"an\" return \"Do you want to create", "colour_msg = lambda msg, colour: fg(colour) + attr(\"bold\") + msg", "PANTAM: str = fg(\"yellow\") + attr(\"bold\") + \"PANTAM\" + attr(\"reset\")", "success_msg = lambda msg: colour_msg(msg, \"green\") error_msg = lambda msg:", "name_actions_folder_msg = lambda: \"What is the name of your actions", "\"What is the name of your main script?\" 
name_actions_folder_msg =", "msg, colour: fg(colour) + attr(\"bold\") + msg + attr(\"reset\") info_msg", "actions folder?\" def create_actions_file_msg(second_run: bool): \"\"\"Actions File Message\"\"\" article =", "\"Do you want to create %s action file?\" % article", "\"\"\" The microframework for microservices. Let's build your app... \"\"\"", "colour_msg(msg, \"red\") class NewLine(Enum): before = 1 after = 2", "your actions folder?\" def create_actions_file_msg(second_run: bool): \"\"\"Actions File Message\"\"\" article", "\"\"\"Actions File Message\"\"\" article = \"another\" if second_run else \"an\"", "\"red\") class NewLine(Enum): before = 1 after = 2 both", "\"an\" return \"Do you want to create %s action file?\"", "Message\"\"\" article = \"another\" if second_run else \"an\" return \"Do", "attr(\"bold\") + msg + attr(\"reset\") info_msg = lambda msg: colour_msg(msg,", "Enum from colored import fg, attr PANTAM: str = fg(\"yellow\")", "= \"another\" if second_run else \"an\" return \"Do you want", "from colored import fg, attr PANTAM: str = fg(\"yellow\") +", "structure: \"\"\"Your application will look like this: %s Happy to", "colored import fg, attr PANTAM: str = fg(\"yellow\") + attr(\"bold\")", "file?\" % article name_actions_file_msg = lambda: \"What is the name", "to stdout\"\"\" prefix: str = \"\\n\" if spacing in (NewLine.before,", "import fg, attr PANTAM: str = fg(\"yellow\") + attr(\"bold\") +", "for microservices. Let's build your app... 
\"\"\" ) name_index_file_msg =", "= lambda msg: colour_msg(msg, \"green\") error_msg = lambda msg: colour_msg(msg,", "msg + attr(\"reset\") info_msg = lambda msg: colour_msg(msg, \"blue\") success_msg", "prefix: str = \"\\n\" if spacing in (NewLine.before, NewLine.both) else", "spacing in (NewLine.before, NewLine.both) else \"\" suffix: str = \"\\n\"", "stdout.write(\"%s%s%s\" % (prefix, msg, suffix)) def write_error(msg: str) -> None:", "None) -> None: \"\"\"Write message to stdout\"\"\" prefix: str =", "+ attr(\"reset\") colour_msg = lambda msg, colour: fg(colour) + attr(\"bold\")", "msg: colour_msg(msg, \"green\") error_msg = lambda msg: colour_msg(msg, \"red\") class", "attr PANTAM: str = fg(\"yellow\") + attr(\"bold\") + \"PANTAM\" +", "in (NewLine.after, NewLine.both) else \"\" stdout.write(\"%s%s%s\" % (prefix, msg, suffix))", "suffix: str = \"\\n\" if spacing in (NewLine.after, NewLine.both) else", "\"PANTAM\" + attr(\"reset\") colour_msg = lambda msg, colour: fg(colour) +", "write_error(msg: str) -> None: \"\"\"Write message to stderr\"\"\" stderr.write(\"\\n%s\\n\" %", "else \"an\" return \"Do you want to create %s action", "create %s action file?\" % article name_actions_file_msg = lambda: \"What", "in (NewLine.before, NewLine.both) else \"\" suffix: str = \"\\n\" if", "(NewLine.before, NewLine.both) else \"\" suffix: str = \"\\n\" if spacing", "msg, suffix)) def write_error(msg: str) -> None: \"\"\"Write message to", "main script?\" name_actions_folder_msg = lambda: \"What is the name of", "lambda msg: colour_msg(msg, \"red\") class NewLine(Enum): before = 1 after", "The microframework for microservices. Let's build your app... 
\"\"\" )", "colour_msg(msg, \"green\") error_msg = lambda msg: colour_msg(msg, \"red\") class NewLine(Enum):", "lambda msg: colour_msg(msg, \"green\") error_msg = lambda msg: colour_msg(msg, \"red\")", "lambda: \"What is the name of your main script?\" name_actions_folder_msg", "(NewLine.after, NewLine.both) else \"\" stdout.write(\"%s%s%s\" % (prefix, msg, suffix)) def", "(prefix, msg, suffix)) def write_error(msg: str) -> None: \"\"\"Write message", "article = \"another\" if second_run else \"an\" return \"Do you", "you want to create %s action file?\" % article name_actions_file_msg", "return \"Do you want to create %s action file?\" %", "NewLine(Enum): before = 1 after = 2 both = 3", "( lambda: PANTAM + \"\"\" The microframework for microservices. Let's", "def write_error(msg: str) -> None: \"\"\"Write message to stderr\"\"\" stderr.write(\"\\n%s\\n\"", "write_msg(msg: str, spacing: NewLine = None) -> None: \"\"\"Write message", "stderr\"\"\" stderr.write(\"\\n%s\\n\" % msg) welcome_msg = ( lambda: PANTAM +", "your app... 
\"\"\" ) name_index_file_msg = lambda: \"What is the", "\"\"\"Your application will look like this: %s Happy to proceed?\"\"\"", "from sys import stderr, stdout from enum import Enum from", "( lambda structure: \"\"\"Your application will look like this: %s", "fg(colour) + attr(\"bold\") + msg + attr(\"reset\") info_msg = lambda", "stdout\"\"\" prefix: str = \"\\n\" if spacing in (NewLine.before, NewLine.both)", "= lambda msg: colour_msg(msg, \"red\") class NewLine(Enum): before = 1", "stdout from enum import Enum from colored import fg, attr", "fg, attr PANTAM: str = fg(\"yellow\") + attr(\"bold\") + \"PANTAM\"", "before = 1 after = 2 both = 3 def", "msg) welcome_msg = ( lambda: PANTAM + \"\"\" The microframework", "= \"\\n\" if spacing in (NewLine.after, NewLine.both) else \"\" stdout.write(\"%s%s%s\"", "fg(\"yellow\") + attr(\"bold\") + \"PANTAM\" + attr(\"reset\") colour_msg = lambda", "None: \"\"\"Write message to stdout\"\"\" prefix: str = \"\\n\" if", "= 1 after = 2 both = 3 def write_msg(msg:", "\"\"\"Write message to stdout\"\"\" prefix: str = \"\\n\" if spacing", "microservices. Let's build your app... \"\"\" ) name_index_file_msg = lambda:", "= ( lambda structure: \"\"\"Your application will look like this:", "msg: colour_msg(msg, \"blue\") success_msg = lambda msg: colour_msg(msg, \"green\") error_msg", "% msg) welcome_msg = ( lambda: PANTAM + \"\"\" The", "create_actions_file_msg(second_run: bool): \"\"\"Actions File Message\"\"\" article = \"another\" if second_run", "spacing: NewLine = None) -> None: \"\"\"Write message to stdout\"\"\"", "def write_msg(msg: str, spacing: NewLine = None) -> None: \"\"\"Write", "+ \"\"\" The microframework for microservices. 
Let's build your app...", "name_index_file_msg = lambda: \"What is the name of your main", "if spacing in (NewLine.before, NewLine.both) else \"\" suffix: str =", "= \"\\n\" if spacing in (NewLine.before, NewLine.both) else \"\" suffix:", "1 after = 2 both = 3 def write_msg(msg: str,", "= ( lambda: PANTAM + \"\"\" The microframework for microservices.", ") name_index_file_msg = lambda: \"What is the name of your", "lambda: \"What is the name of your actions file?\" confirm_structure_msg", "script?\" name_actions_folder_msg = lambda: \"What is the name of your", "import stderr, stdout from enum import Enum from colored import", "after = 2 both = 3 def write_msg(msg: str, spacing:", "attr(\"reset\") colour_msg = lambda msg, colour: fg(colour) + attr(\"bold\") +", "colour: fg(colour) + attr(\"bold\") + msg + attr(\"reset\") info_msg =", "None: \"\"\"Write message to stderr\"\"\" stderr.write(\"\\n%s\\n\" % msg) welcome_msg =", "if spacing in (NewLine.after, NewLine.both) else \"\" stdout.write(\"%s%s%s\" % (prefix,", "class NewLine(Enum): before = 1 after = 2 both =", "the name of your actions file?\" confirm_structure_msg = ( lambda", "else \"\" suffix: str = \"\\n\" if spacing in (NewLine.after,", "= lambda msg, colour: fg(colour) + attr(\"bold\") + msg +", "= lambda: \"What is the name of your actions file?\"", "NewLine = None) -> None: \"\"\"Write message to stdout\"\"\" prefix:", "= 2 both = 3 def write_msg(msg: str, spacing: NewLine", "name of your actions folder?\" def create_actions_file_msg(second_run: bool): \"\"\"Actions File", "attr(\"bold\") + \"PANTAM\" + attr(\"reset\") colour_msg = lambda msg, colour:", "str = fg(\"yellow\") + attr(\"bold\") + \"PANTAM\" + attr(\"reset\") colour_msg", "= lambda: \"What is the name of your actions folder?\"", "3 def write_msg(msg: str, spacing: NewLine = None) -> None:", "\"What is the name of your actions file?\" confirm_structure_msg =", "to stderr\"\"\" stderr.write(\"\\n%s\\n\" % msg) welcome_msg = ( lambda: 
PANTAM", "of your actions file?\" confirm_structure_msg = ( lambda structure: \"\"\"Your", "look like this: %s Happy to proceed?\"\"\" % structure )", "= None) -> None: \"\"\"Write message to stdout\"\"\" prefix: str", "= fg(\"yellow\") + attr(\"bold\") + \"PANTAM\" + attr(\"reset\") colour_msg =", "\"\" stdout.write(\"%s%s%s\" % (prefix, msg, suffix)) def write_error(msg: str) ->", "+ attr(\"bold\") + msg + attr(\"reset\") info_msg = lambda msg:", "welcome_msg = ( lambda: PANTAM + \"\"\" The microframework for", "the name of your main script?\" name_actions_folder_msg = lambda: \"What", "str) -> None: \"\"\"Write message to stderr\"\"\" stderr.write(\"\\n%s\\n\" % msg)", "= 3 def write_msg(msg: str, spacing: NewLine = None) ->", "bool): \"\"\"Actions File Message\"\"\" article = \"another\" if second_run else", "import Enum from colored import fg, attr PANTAM: str =", "build your app... \"\"\" ) name_index_file_msg = lambda: \"What is", "<reponame>flmnt/pantam from sys import stderr, stdout from enum import Enum", "info_msg = lambda msg: colour_msg(msg, \"blue\") success_msg = lambda msg:", "your main script?\" name_actions_folder_msg = lambda: \"What is the name", "folder?\" def create_actions_file_msg(second_run: bool): \"\"\"Actions File Message\"\"\" article = \"another\"", "to create %s action file?\" % article name_actions_file_msg = lambda:", "\"green\") error_msg = lambda msg: colour_msg(msg, \"red\") class NewLine(Enum): before", "of your actions folder?\" def create_actions_file_msg(second_run: bool): \"\"\"Actions File Message\"\"\"", "def create_actions_file_msg(second_run: bool): \"\"\"Actions File Message\"\"\" article = \"another\" if", "enum import Enum from colored import fg, attr PANTAM: str", "%s action file?\" % article name_actions_file_msg = lambda: \"What is", "sys import stderr, stdout from enum import Enum from colored", "str, spacing: NewLine = None) -> None: \"\"\"Write message to", "-> None: \"\"\"Write message to stdout\"\"\" prefix: 
str = \"\\n\"", "\"\" suffix: str = \"\\n\" if spacing in (NewLine.after, NewLine.both)", "msg: colour_msg(msg, \"red\") class NewLine(Enum): before = 1 after =", "= lambda: \"What is the name of your main script?\"", "your actions file?\" confirm_structure_msg = ( lambda structure: \"\"\"Your application", "lambda msg: colour_msg(msg, \"blue\") success_msg = lambda msg: colour_msg(msg, \"green\")", "str = \"\\n\" if spacing in (NewLine.before, NewLine.both) else \"\"", "+ attr(\"reset\") info_msg = lambda msg: colour_msg(msg, \"blue\") success_msg =", "of your main script?\" name_actions_folder_msg = lambda: \"What is the", "microframework for microservices. Let's build your app... \"\"\" ) name_index_file_msg", "from enum import Enum from colored import fg, attr PANTAM:", "else \"\" stdout.write(\"%s%s%s\" % (prefix, msg, suffix)) def write_error(msg: str)", "will look like this: %s Happy to proceed?\"\"\" % structure", "error_msg = lambda msg: colour_msg(msg, \"red\") class NewLine(Enum): before =", "suffix)) def write_error(msg: str) -> None: \"\"\"Write message to stderr\"\"\"", "actions file?\" confirm_structure_msg = ( lambda structure: \"\"\"Your application will", "is the name of your actions file?\" confirm_structure_msg = (", "File Message\"\"\" article = \"another\" if second_run else \"an\" return" ]
[ "pod to come up and running {ret}\") return ret def", "on the pool' assert delete_cephblockpool('test-pool'), 'Failed to delete pool' ceph_cluster.cluster_health_check(timeout=0)", "1, pytest.skip( \"INVALID: Mon count should be more than one", "Running\") ceph_cluster.cluster_health_check(timeout=3) ret = pods.wait_for_resource( condition=constants.STATUS_RUNNING, selector='app=rook-ceph-mon', resource_count=3, timeout=700) log.info(f\"waited", "performed on the pool and waiting for the operator to", "come up and running {ret}\") return ret def run_io_on_pool(): \"\"\"", "the pool and waiting for the operator to create a", ") list_mons = ceph_cluster.get_mons_from_cluster() assert len(list_mons) > 1, pytest.skip( \"INVALID:", "tier4, ManageTest from ocs_ci.framework import config from ocs_ci.ocs.resources import pod", "A Testcase to remove mon from when I/O's are happening.", "from ocs_ci.framework import config from ocs_ci.ocs.resources import pod from tests.helpers", "a new mon pod on its own \"\"\" ceph_cluster =", "when I/O's are happening. 
Polarion-ID- OCS-355 \"\"\" import logging import", "I/O on the pool and delete the pool Returns: A", "pod on its own \"\"\" ceph_cluster = CephCluster() pods =", "on the pool and delete the pool Returns: A thread", "pod from the cluster after the I/O is performed on", "create a new mon pod on its own \"\"\" ceph_cluster", "the resource, False otherwise \"\"\" log.info(f\"Verifying all mons pods are", "timeout=700) log.info(f\"waited for all mon pod to come up and", "from ocs_ci.ocs.cluster import CephCluster from ocs_ci.utility.retry import retry from ocs_ci.ocs.exceptions", "delete_cephblockpool from ocs_ci.ocs.cluster import CephCluster from ocs_ci.utility.retry import retry from", "pool Returns: A thread of I/O \"\"\" tools_pod = pod.get_ceph_tools_pod()", "wait for the resource, False otherwise \"\"\" log.info(f\"Verifying all mons", "{ret}\") return ret def run_io_on_pool(): \"\"\" Runs the I/O on", "pool' assert delete_cephblockpool('test-pool'), 'Failed to delete pool' ceph_cluster.cluster_health_check(timeout=0) ceph_cluster.remove_mon_from_cluster() assert", "mon from when I/O's are happening. 
Polarion-ID- OCS-355 \"\"\" import", "from the cluster after the I/O is performed on the", "TestRemoveMonFromCluster(ManageTest): def test_remove_mon_pod_from_cluster(self): \"\"\" To remove mon pod from the", "pool' ceph_cluster.cluster_health_check(timeout=0) ceph_cluster.remove_mon_from_cluster() assert verify_mon_pod_up(ceph_cluster, pods), f\"Mon pods are not", "one to delete.\" ) assert run_io_on_pool(), 'Failed to run I/O", "OCS-355 \"\"\" import logging import pytest from ocs_ci.ocs import ocp,", "= ceph_cluster.get_mons_from_cluster() assert len(list_mons) > 1, pytest.skip( \"INVALID: Mon count", "pods), f\"Mon pods are not up and running state\" ceph_cluster.cluster_health_check(timeout=60)", "list_mons = ceph_cluster.get_mons_from_cluster() assert len(list_mons) > 1, pytest.skip( \"INVALID: Mon", "the cluster after the I/O is performed on the pool", "= logging.getLogger(__name__) @retry(CephHealthException, 8, 3, 1) def verify_mon_pod_up(ceph_cluster, pods): \"\"\"", "for the operator to create a new mon pod on", "import ocp, constants from ocs_ci.framework.testlib import tier4, ManageTest from ocs_ci.framework", "run_io_on_pool(), 'Failed to run I/O on the pool' assert delete_cephblockpool('test-pool'),", "log.info(f\"Verifying all mons pods are up and Running\") ceph_cluster.cluster_health_check(timeout=3) ret", "verify_mon_pod_up(ceph_cluster, pods): \"\"\" Verify mon pods are in Running state.", "new mon pod on its own \"\"\" ceph_cluster = CephCluster()", "bool: True for wait for the resource, False otherwise \"\"\"", ") assert run_io_on_pool(), 'Failed to run I/O on the pool'", "to come up and running {ret}\") return ret def run_io_on_pool():", "from when I/O's are happening. Polarion-ID- OCS-355 \"\"\" import logging", "than one to delete.\" ) assert run_io_on_pool(), 'Failed to run", "all mons pods are up and Running\") ceph_cluster.cluster_health_check(timeout=3) ret =", "\"\"\" Verify mon pods are in Running state. 
Returns: bool:", "Runs the I/O on the pool and delete the pool", "remove mon pod from the cluster after the I/O is", "and waiting for the operator to create a new mon", "pods.wait_for_resource( condition=constants.STATUS_RUNNING, selector='app=rook-ceph-mon', resource_count=3, timeout=700) log.info(f\"waited for all mon pod", "retry from ocs_ci.ocs.exceptions import CephHealthException log = logging.getLogger(__name__) @retry(CephHealthException, 8,", "ceph_cluster = CephCluster() pods = ocp.OCP( kind=constants.POD, namespace=config.ENV_DATA['cluster_namespace'] ) list_mons", "from ocs_ci.framework.testlib import tier4, ManageTest from ocs_ci.framework import config from", "False otherwise \"\"\" log.info(f\"Verifying all mons pods are up and", "CephCluster from ocs_ci.utility.retry import retry from ocs_ci.ocs.exceptions import CephHealthException log", "\"\"\" tools_pod = pod.get_ceph_tools_pod() tools_pod.add_role(role='client') return run_io_with_rados_bench( ceph_pods=[tools_pod], config={'time': 45,", "False, 'pool': 'test-pool' } ) @tier4 @pytest.mark.polarion_id(\"OCS-355\") class TestRemoveMonFromCluster(ManageTest): def", "logging import pytest from ocs_ci.ocs import ocp, constants from ocs_ci.framework.testlib", "\"\"\" A Testcase to remove mon from when I/O's are", "import tier4, ManageTest from ocs_ci.framework import config from ocs_ci.ocs.resources import", "are happening. Polarion-ID- OCS-355 \"\"\" import logging import pytest from", "remove mon from when I/O's are happening. Polarion-ID- OCS-355 \"\"\"", "Returns: bool: True for wait for the resource, False otherwise", "Verify mon pods are in Running state. 
Returns: bool: True", "pods are up and Running\") ceph_cluster.cluster_health_check(timeout=3) ret = pods.wait_for_resource( condition=constants.STATUS_RUNNING,", "tools_pod = pod.get_ceph_tools_pod() tools_pod.add_role(role='client') return run_io_with_rados_bench( ceph_pods=[tools_pod], config={'time': 45, 'cleanup':", "be more than one to delete.\" ) assert run_io_on_pool(), 'Failed", "ManageTest from ocs_ci.framework import config from ocs_ci.ocs.resources import pod from", "class TestRemoveMonFromCluster(ManageTest): def test_remove_mon_pod_from_cluster(self): \"\"\" To remove mon pod from", "running {ret}\") return ret def run_io_on_pool(): \"\"\" Runs the I/O", "happening. Polarion-ID- OCS-355 \"\"\" import logging import pytest from ocs_ci.ocs", "the I/O on the pool and delete the pool Returns:", "'Failed to delete pool' ceph_cluster.cluster_health_check(timeout=0) ceph_cluster.remove_mon_from_cluster() assert verify_mon_pod_up(ceph_cluster, pods), f\"Mon", "ceph_cluster.cluster_health_check(timeout=3) ret = pods.wait_for_resource( condition=constants.STATUS_RUNNING, selector='app=rook-ceph-mon', resource_count=3, timeout=700) log.info(f\"waited for", "def test_remove_mon_pod_from_cluster(self): \"\"\" To remove mon pod from the cluster", "in Running state. 
Returns: bool: True for wait for the", "log = logging.getLogger(__name__) @retry(CephHealthException, 8, 3, 1) def verify_mon_pod_up(ceph_cluster, pods):", "more than one to delete.\" ) assert run_io_on_pool(), 'Failed to", "delete_cephblockpool('test-pool'), 'Failed to delete pool' ceph_cluster.cluster_health_check(timeout=0) ceph_cluster.remove_mon_from_cluster() assert verify_mon_pod_up(ceph_cluster, pods),", "assert len(list_mons) > 1, pytest.skip( \"INVALID: Mon count should be", "\"\"\" import logging import pytest from ocs_ci.ocs import ocp, constants", "all mon pod to come up and running {ret}\") return", "'cleanup': False, 'pool': 'test-pool' } ) @tier4 @pytest.mark.polarion_id(\"OCS-355\") class TestRemoveMonFromCluster(ManageTest):", "return ret def run_io_on_pool(): \"\"\" Runs the I/O on the", "ocs_ci.ocs import ocp, constants from ocs_ci.framework.testlib import tier4, ManageTest from", "CephCluster() pods = ocp.OCP( kind=constants.POD, namespace=config.ENV_DATA['cluster_namespace'] ) list_mons = ceph_cluster.get_mons_from_cluster()", "\"\"\" To remove mon pod from the cluster after the", "pod.get_ceph_tools_pod() tools_pod.add_role(role='client') return run_io_with_rados_bench( ceph_pods=[tools_pod], config={'time': 45, 'cleanup': False, 'pool':", "selector='app=rook-ceph-mon', resource_count=3, timeout=700) log.info(f\"waited for all mon pod to come", "> 1, pytest.skip( \"INVALID: Mon count should be more than", "def verify_mon_pod_up(ceph_cluster, pods): \"\"\" Verify mon pods are in Running", "kind=constants.POD, namespace=config.ENV_DATA['cluster_namespace'] ) list_mons = ceph_cluster.get_mons_from_cluster() assert len(list_mons) > 1,", "test_remove_mon_pod_from_cluster(self): \"\"\" To remove mon pod from the cluster after", "up and Running\") ceph_cluster.cluster_health_check(timeout=3) ret = pods.wait_for_resource( condition=constants.STATUS_RUNNING, selector='app=rook-ceph-mon', resource_count=3,", "count should be more than one to delete.\" 
) assert", "the I/O is performed on the pool and waiting for", "the pool' assert delete_cephblockpool('test-pool'), 'Failed to delete pool' ceph_cluster.cluster_health_check(timeout=0) ceph_cluster.remove_mon_from_cluster()", "mon pods are in Running state. Returns: bool: True for", "for all mon pod to come up and running {ret}\")", "pool and waiting for the operator to create a new", "mon pod from the cluster after the I/O is performed", "to delete pool' ceph_cluster.cluster_health_check(timeout=0) ceph_cluster.remove_mon_from_cluster() assert verify_mon_pod_up(ceph_cluster, pods), f\"Mon pods", "Running state. Returns: bool: True for wait for the resource,", "config={'time': 45, 'cleanup': False, 'pool': 'test-pool' } ) @tier4 @pytest.mark.polarion_id(\"OCS-355\")", "import pod from tests.helpers import run_io_with_rados_bench, delete_cephblockpool from ocs_ci.ocs.cluster import", "to delete.\" ) assert run_io_on_pool(), 'Failed to run I/O on", "run I/O on the pool' assert delete_cephblockpool('test-pool'), 'Failed to delete", "config from ocs_ci.ocs.resources import pod from tests.helpers import run_io_with_rados_bench, delete_cephblockpool", "Testcase to remove mon from when I/O's are happening. Polarion-ID-", "tools_pod.add_role(role='client') return run_io_with_rados_bench( ceph_pods=[tools_pod], config={'time': 45, 'cleanup': False, 'pool': 'test-pool'", "on the pool and waiting for the operator to create", "from ocs_ci.ocs import ocp, constants from ocs_ci.framework.testlib import tier4, ManageTest", "cluster after the I/O is performed on the pool and", "assert delete_cephblockpool('test-pool'), 'Failed to delete pool' ceph_cluster.cluster_health_check(timeout=0) ceph_cluster.remove_mon_from_cluster() assert verify_mon_pod_up(ceph_cluster,", "len(list_mons) > 1, pytest.skip( \"INVALID: Mon count should be more", "are in Running state. Returns: bool: True for wait for", "I/O's are happening. 
Polarion-ID- OCS-355 \"\"\" import logging import pytest", "and delete the pool Returns: A thread of I/O \"\"\"", "tests.helpers import run_io_with_rados_bench, delete_cephblockpool from ocs_ci.ocs.cluster import CephCluster from ocs_ci.utility.retry", "of I/O \"\"\" tools_pod = pod.get_ceph_tools_pod() tools_pod.add_role(role='client') return run_io_with_rados_bench( ceph_pods=[tools_pod],", "'pool': 'test-pool' } ) @tier4 @pytest.mark.polarion_id(\"OCS-355\") class TestRemoveMonFromCluster(ManageTest): def test_remove_mon_pod_from_cluster(self):", "I/O on the pool' assert delete_cephblockpool('test-pool'), 'Failed to delete pool'", "log.info(f\"waited for all mon pod to come up and running", "to create a new mon pod on its own \"\"\"", "pool and delete the pool Returns: A thread of I/O", "pods): \"\"\" Verify mon pods are in Running state. Returns:", "ret = pods.wait_for_resource( condition=constants.STATUS_RUNNING, selector='app=rook-ceph-mon', resource_count=3, timeout=700) log.info(f\"waited for all", "import CephHealthException log = logging.getLogger(__name__) @retry(CephHealthException, 8, 3, 1) def", "and running {ret}\") return ret def run_io_on_pool(): \"\"\" Runs the", "operator to create a new mon pod on its own", "import run_io_with_rados_bench, delete_cephblockpool from ocs_ci.ocs.cluster import CephCluster from ocs_ci.utility.retry import", "own \"\"\" ceph_cluster = CephCluster() pods = ocp.OCP( kind=constants.POD, namespace=config.ENV_DATA['cluster_namespace']", "to remove mon from when I/O's are happening. 
Polarion-ID- OCS-355", "ocs_ci.ocs.resources import pod from tests.helpers import run_io_with_rados_bench, delete_cephblockpool from ocs_ci.ocs.cluster", "logging.getLogger(__name__) @retry(CephHealthException, 8, 3, 1) def verify_mon_pod_up(ceph_cluster, pods): \"\"\" Verify", "for wait for the resource, False otherwise \"\"\" log.info(f\"Verifying all", "import retry from ocs_ci.ocs.exceptions import CephHealthException log = logging.getLogger(__name__) @retry(CephHealthException,", "delete pool' ceph_cluster.cluster_health_check(timeout=0) ceph_cluster.remove_mon_from_cluster() assert verify_mon_pod_up(ceph_cluster, pods), f\"Mon pods are", "= pods.wait_for_resource( condition=constants.STATUS_RUNNING, selector='app=rook-ceph-mon', resource_count=3, timeout=700) log.info(f\"waited for all mon", "resource_count=3, timeout=700) log.info(f\"waited for all mon pod to come up", "from ocs_ci.ocs.resources import pod from tests.helpers import run_io_with_rados_bench, delete_cephblockpool from", "ceph_cluster.get_mons_from_cluster() assert len(list_mons) > 1, pytest.skip( \"INVALID: Mon count should", "\"INVALID: Mon count should be more than one to delete.\"", "\"\"\" ceph_cluster = CephCluster() pods = ocp.OCP( kind=constants.POD, namespace=config.ENV_DATA['cluster_namespace'] )", "mons pods are up and Running\") ceph_cluster.cluster_health_check(timeout=3) ret = pods.wait_for_resource(", "assert run_io_on_pool(), 'Failed to run I/O on the pool' assert", "\"\"\" log.info(f\"Verifying all mons pods are up and Running\") ceph_cluster.cluster_health_check(timeout=3)", "ceph_pods=[tools_pod], config={'time': 45, 'cleanup': False, 'pool': 'test-pool' } ) @tier4", "pods = ocp.OCP( kind=constants.POD, namespace=config.ENV_DATA['cluster_namespace'] ) list_mons = ceph_cluster.get_mons_from_cluster() assert", "pods are in Running state. 
Returns: bool: True for wait", "'Failed to run I/O on the pool' assert delete_cephblockpool('test-pool'), 'Failed", "its own \"\"\" ceph_cluster = CephCluster() pods = ocp.OCP( kind=constants.POD,", "ceph_cluster.cluster_health_check(timeout=0) ceph_cluster.remove_mon_from_cluster() assert verify_mon_pod_up(ceph_cluster, pods), f\"Mon pods are not up", "def run_io_on_pool(): \"\"\" Runs the I/O on the pool and", "45, 'cleanup': False, 'pool': 'test-pool' } ) @tier4 @pytest.mark.polarion_id(\"OCS-355\") class", "import pytest from ocs_ci.ocs import ocp, constants from ocs_ci.framework.testlib import", "ceph_cluster.remove_mon_from_cluster() assert verify_mon_pod_up(ceph_cluster, pods), f\"Mon pods are not up and", "run_io_with_rados_bench, delete_cephblockpool from ocs_ci.ocs.cluster import CephCluster from ocs_ci.utility.retry import retry", "from ocs_ci.ocs.exceptions import CephHealthException log = logging.getLogger(__name__) @retry(CephHealthException, 8, 3,", "the operator to create a new mon pod on its", "should be more than one to delete.\" ) assert run_io_on_pool(),", "the pool Returns: A thread of I/O \"\"\" tools_pod =", "after the I/O is performed on the pool and waiting", "ocp.OCP( kind=constants.POD, namespace=config.ENV_DATA['cluster_namespace'] ) list_mons = ceph_cluster.get_mons_from_cluster() assert len(list_mons) >", "CephHealthException log = logging.getLogger(__name__) @retry(CephHealthException, 8, 3, 1) def verify_mon_pod_up(ceph_cluster,", "the pool and delete the pool Returns: A thread of", "ocs_ci.ocs.cluster import CephCluster from ocs_ci.utility.retry import retry from ocs_ci.ocs.exceptions import", "'test-pool' } ) @tier4 @pytest.mark.polarion_id(\"OCS-355\") class TestRemoveMonFromCluster(ManageTest): def test_remove_mon_pod_from_cluster(self): \"\"\"", "mon pod on its own \"\"\" ceph_cluster = CephCluster() pods", "pytest.skip( \"INVALID: Mon count should be more than one to", "assert verify_mon_pod_up(ceph_cluster, pods), f\"Mon pods 
are not up and running", "3, 1) def verify_mon_pod_up(ceph_cluster, pods): \"\"\" Verify mon pods are", "= ocp.OCP( kind=constants.POD, namespace=config.ENV_DATA['cluster_namespace'] ) list_mons = ceph_cluster.get_mons_from_cluster() assert len(list_mons)", "} ) @tier4 @pytest.mark.polarion_id(\"OCS-355\") class TestRemoveMonFromCluster(ManageTest): def test_remove_mon_pod_from_cluster(self): \"\"\" To", "@tier4 @pytest.mark.polarion_id(\"OCS-355\") class TestRemoveMonFromCluster(ManageTest): def test_remove_mon_pod_from_cluster(self): \"\"\" To remove mon", "from ocs_ci.utility.retry import retry from ocs_ci.ocs.exceptions import CephHealthException log =", "import config from ocs_ci.ocs.resources import pod from tests.helpers import run_io_with_rados_bench,", "1) def verify_mon_pod_up(ceph_cluster, pods): \"\"\" Verify mon pods are in", "constants from ocs_ci.framework.testlib import tier4, ManageTest from ocs_ci.framework import config", "ocs_ci.framework.testlib import tier4, ManageTest from ocs_ci.framework import config from ocs_ci.ocs.resources", "up and running {ret}\") return ret def run_io_on_pool(): \"\"\" Runs", "Returns: A thread of I/O \"\"\" tools_pod = pod.get_ceph_tools_pod() tools_pod.add_role(role='client')", "thread of I/O \"\"\" tools_pod = pod.get_ceph_tools_pod() tools_pod.add_role(role='client') return run_io_with_rados_bench(", "verify_mon_pod_up(ceph_cluster, pods), f\"Mon pods are not up and running state\"", "pytest from ocs_ci.ocs import ocp, constants from ocs_ci.framework.testlib import tier4,", "from tests.helpers import run_io_with_rados_bench, delete_cephblockpool from ocs_ci.ocs.cluster import CephCluster from", "otherwise \"\"\" log.info(f\"Verifying all mons pods are up and Running\")", "on its own \"\"\" ceph_cluster = CephCluster() pods = ocp.OCP(", "delete the pool Returns: A thread of I/O \"\"\" tools_pod", "To remove mon pod from the cluster after the I/O", "mon pod to come up and running {ret}\") return ret", "and 
Running\") ceph_cluster.cluster_health_check(timeout=3) ret = pods.wait_for_resource( condition=constants.STATUS_RUNNING, selector='app=rook-ceph-mon', resource_count=3, timeout=700)", "condition=constants.STATUS_RUNNING, selector='app=rook-ceph-mon', resource_count=3, timeout=700) log.info(f\"waited for all mon pod to", "I/O \"\"\" tools_pod = pod.get_ceph_tools_pod() tools_pod.add_role(role='client') return run_io_with_rados_bench( ceph_pods=[tools_pod], config={'time':", "waiting for the operator to create a new mon pod", "I/O is performed on the pool and waiting for the", "resource, False otherwise \"\"\" log.info(f\"Verifying all mons pods are up", "state. Returns: bool: True for wait for the resource, False", "import CephCluster from ocs_ci.utility.retry import retry from ocs_ci.ocs.exceptions import CephHealthException", "for the resource, False otherwise \"\"\" log.info(f\"Verifying all mons pods", "are up and Running\") ceph_cluster.cluster_health_check(timeout=3) ret = pods.wait_for_resource( condition=constants.STATUS_RUNNING, selector='app=rook-ceph-mon',", "run_io_on_pool(): \"\"\" Runs the I/O on the pool and delete", "delete.\" ) assert run_io_on_pool(), 'Failed to run I/O on the", "run_io_with_rados_bench( ceph_pods=[tools_pod], config={'time': 45, 'cleanup': False, 'pool': 'test-pool' } )", ") @tier4 @pytest.mark.polarion_id(\"OCS-355\") class TestRemoveMonFromCluster(ManageTest): def test_remove_mon_pod_from_cluster(self): \"\"\" To remove", "= pod.get_ceph_tools_pod() tools_pod.add_role(role='client') return run_io_with_rados_bench( ceph_pods=[tools_pod], config={'time': 45, 'cleanup': False,", "pod from tests.helpers import run_io_with_rados_bench, delete_cephblockpool from ocs_ci.ocs.cluster import CephCluster", "ocs_ci.ocs.exceptions import CephHealthException log = logging.getLogger(__name__) @retry(CephHealthException, 8, 3, 1)", "\"\"\" Runs the I/O on the pool and delete the", "True for wait for the resource, False otherwise \"\"\" 
log.info(f\"Verifying", "is performed on the pool and waiting for the operator", "A thread of I/O \"\"\" tools_pod = pod.get_ceph_tools_pod() tools_pod.add_role(role='client') return", "return run_io_with_rados_bench( ceph_pods=[tools_pod], config={'time': 45, 'cleanup': False, 'pool': 'test-pool' }", "Mon count should be more than one to delete.\" )", "Polarion-ID- OCS-355 \"\"\" import logging import pytest from ocs_ci.ocs import", "ret def run_io_on_pool(): \"\"\" Runs the I/O on the pool", "import logging import pytest from ocs_ci.ocs import ocp, constants from", "to run I/O on the pool' assert delete_cephblockpool('test-pool'), 'Failed to", "@pytest.mark.polarion_id(\"OCS-355\") class TestRemoveMonFromCluster(ManageTest): def test_remove_mon_pod_from_cluster(self): \"\"\" To remove mon pod", "8, 3, 1) def verify_mon_pod_up(ceph_cluster, pods): \"\"\" Verify mon pods", "@retry(CephHealthException, 8, 3, 1) def verify_mon_pod_up(ceph_cluster, pods): \"\"\" Verify mon", "ocs_ci.utility.retry import retry from ocs_ci.ocs.exceptions import CephHealthException log = logging.getLogger(__name__)", "ocs_ci.framework import config from ocs_ci.ocs.resources import pod from tests.helpers import", "ocp, constants from ocs_ci.framework.testlib import tier4, ManageTest from ocs_ci.framework import", "namespace=config.ENV_DATA['cluster_namespace'] ) list_mons = ceph_cluster.get_mons_from_cluster() assert len(list_mons) > 1, pytest.skip(", "= CephCluster() pods = ocp.OCP( kind=constants.POD, namespace=config.ENV_DATA['cluster_namespace'] ) list_mons =" ]
[ "\"\"\" self.sender = sender self.serializer = serializer def send(self, lookup):", "'exclude_states', self.build_filter_string(lookup.exclude)) self.add_parameter(request, 'prefer_cities', self.build_filter_string(lookup.prefer_cities)) self.add_parameter(request, 'prefer_states', self.build_filter_string(lookup.prefer_states)) self.add_parameter(request, 'prefer_zip_codes',", "lookup.search) self.add_parameter(request, 'max_results', lookup.max_results) self.add_parameter(request, 'include_only_cities', self.build_filter_string(lookup.city_filter)) self.add_parameter(request, 'include_only_states', self.build_filter_string(lookup.state_filter))", "def add_parameter(request, key, value): if value and value != 'none':", "= self.build_request(lookup) response = self.sender.send(request) if response.error: raise response.error result", "SmartyException('Send() must be passed a Lookup with the search field", "self.add_parameter(request, 'prefer_ratio', lookup.prefer_ratio) self.add_parameter(request, 'prefer_geolocation', lookup.prefer_geo) self.add_parameter(request, 'selected', lookup.selected) return", "response = self.sender.send(request) if response.error: raise response.error result = self.serializer.deserialize(response.payload)", "response.error result = self.serializer.deserialize(response.payload) suggestions = self.convert_suggestions(result.get('suggestions') or []) lookup.result", "suggestions = self.convert_suggestions(result.get('suggestions') or []) lookup.result = suggestions return suggestions", "not lookup.search: raise SmartyException('Send() must be passed a Lookup with", "return suggestions def build_request(self, lookup): request = Request() self.add_parameter(request, 'search',", "must be passed a Lookup with the search field set.')", "with the search field set.') request = self.build_request(lookup) response =", "self.add_parameter(request, 'include_only_zip_codes', self.build_filter_string(lookup.zip_filter)) self.add_parameter(request, 
'exclude_states', self.build_filter_string(lookup.exclude)) self.add_parameter(request, 'prefer_cities', self.build_filter_string(lookup.prefer_cities)) self.add_parameter(request,", "add_parameter(request, key, value): if value and value != 'none': request.parameters[key]", "or []) or None @staticmethod def convert_suggestions(suggestion_dictionaries): return [Suggestion(suggestion) for", "from smartystreets_python_sdk.exceptions import SmartyException from smartystreets_python_sdk.us_autocomplete_pro import Suggestion, geolocation_type class", "suggestion_dictionaries] @staticmethod def add_parameter(request, key, value): if value and value", "import Request from smartystreets_python_sdk.exceptions import SmartyException from smartystreets_python_sdk.us_autocomplete_pro import Suggestion,", "'prefer_states', self.build_filter_string(lookup.prefer_states)) self.add_parameter(request, 'prefer_zip_codes', self.build_filter_string(lookup.prefer_zips)) self.add_parameter(request, 'prefer_ratio', lookup.prefer_ratio) self.add_parameter(request, 'prefer_geolocation',", "self.sender = sender self.serializer = serializer def send(self, lookup): \"\"\"", "def build_filter_string(filter_list): return ','.join(filter_list or []) or None @staticmethod def", "'prefer_ratio', lookup.prefer_ratio) self.add_parameter(request, 'prefer_geolocation', lookup.prefer_geo) self.add_parameter(request, 'selected', lookup.selected) return request", "return request @staticmethod def build_filter_string(filter_list): return ','.join(filter_list or []) or", "convert_suggestions(suggestion_dictionaries): return [Suggestion(suggestion) for suggestion in suggestion_dictionaries] @staticmethod def add_parameter(request,", "not lookup or not lookup.search: raise SmartyException('Send() must be passed", "result field. 
\"\"\" if not lookup or not lookup.search: raise", "send(self, lookup): \"\"\" Sends a Lookup object to the US", "request = self.build_request(lookup) response = self.sender.send(request) if response.error: raise response.error", "Autocomplete Pro API and stores the result in the Lookup's", "lookup): request = Request() self.add_parameter(request, 'search', lookup.search) self.add_parameter(request, 'max_results', lookup.max_results)", "'include_only_states', self.build_filter_string(lookup.state_filter)) self.add_parameter(request, 'include_only_zip_codes', self.build_filter_string(lookup.zip_filter)) self.add_parameter(request, 'exclude_states', self.build_filter_string(lookup.exclude)) self.add_parameter(request, 'prefer_cities',", "class Client: def __init__(self, sender, serializer): \"\"\" It is recommended", "US Autocomplete Pro API and stores the result in the", "self.add_parameter(request, 'selected', lookup.selected) return request @staticmethod def build_filter_string(filter_list): return ','.join(filter_list", "stores the result in the Lookup's result field. 
\"\"\" if", "self.build_request(lookup) response = self.sender.send(request) if response.error: raise response.error result =", "lookup.max_results) self.add_parameter(request, 'include_only_cities', self.build_filter_string(lookup.city_filter)) self.add_parameter(request, 'include_only_states', self.build_filter_string(lookup.state_filter)) self.add_parameter(request, 'include_only_zip_codes', self.build_filter_string(lookup.zip_filter))", "<reponame>Caaz/smartystreets-python-sdk<filename>smartystreets_python_sdk/us_autocomplete_pro/client.py from smartystreets_python_sdk import Request from smartystreets_python_sdk.exceptions import SmartyException from", "= sender self.serializer = serializer def send(self, lookup): \"\"\" Sends", "sender self.serializer = serializer def send(self, lookup): \"\"\" Sends a", "or not lookup.search: raise SmartyException('Send() must be passed a Lookup", "a Lookup with the search field set.') request = self.build_request(lookup)", "= Request() self.add_parameter(request, 'search', lookup.search) self.add_parameter(request, 'max_results', lookup.max_results) self.add_parameter(request, 'include_only_cities',", "is recommended to instantiate this class using ClientBuilder.build_us_autocomplete_pro_api_client() \"\"\" self.sender", "def send(self, lookup): \"\"\" Sends a Lookup object to the", "return ','.join(filter_list or []) or None @staticmethod def convert_suggestions(suggestion_dictionaries): return", "self.convert_suggestions(result.get('suggestions') or []) lookup.result = suggestions return suggestions def build_request(self,", "this class using ClientBuilder.build_us_autocomplete_pro_api_client() \"\"\" self.sender = sender self.serializer =", "self.add_parameter(request, 'prefer_states', self.build_filter_string(lookup.prefer_states)) self.add_parameter(request, 'prefer_zip_codes', self.build_filter_string(lookup.prefer_zips)) self.add_parameter(request, 'prefer_ratio', lookup.prefer_ratio) self.add_parameter(request,", 
"self.build_filter_string(lookup.prefer_cities)) self.add_parameter(request, 'prefer_states', self.build_filter_string(lookup.prefer_states)) self.add_parameter(request, 'prefer_zip_codes', self.build_filter_string(lookup.prefer_zips)) self.add_parameter(request, 'prefer_ratio', lookup.prefer_ratio)", "if response.error: raise response.error result = self.serializer.deserialize(response.payload) suggestions = self.convert_suggestions(result.get('suggestions')", "def convert_suggestions(suggestion_dictionaries): return [Suggestion(suggestion) for suggestion in suggestion_dictionaries] @staticmethod def", "field set.') request = self.build_request(lookup) response = self.sender.send(request) if response.error:", "instantiate this class using ClientBuilder.build_us_autocomplete_pro_api_client() \"\"\" self.sender = sender self.serializer", "or None @staticmethod def convert_suggestions(suggestion_dictionaries): return [Suggestion(suggestion) for suggestion in", "lookup or not lookup.search: raise SmartyException('Send() must be passed a", "\"\"\" Sends a Lookup object to the US Autocomplete Pro", "self.build_filter_string(lookup.prefer_states)) self.add_parameter(request, 'prefer_zip_codes', self.build_filter_string(lookup.prefer_zips)) self.add_parameter(request, 'prefer_ratio', lookup.prefer_ratio) self.add_parameter(request, 'prefer_geolocation', lookup.prefer_geo)", "for suggestion in suggestion_dictionaries] @staticmethod def add_parameter(request, key, value): if", "= serializer def send(self, lookup): \"\"\" Sends a Lookup object", "= suggestions return suggestions def build_request(self, lookup): request = Request()", "the result in the Lookup's result field. 
\"\"\" if not", "Lookup object to the US Autocomplete Pro API and stores", "response.error: raise response.error result = self.serializer.deserialize(response.payload) suggestions = self.convert_suggestions(result.get('suggestions') or", "self.add_parameter(request, 'search', lookup.search) self.add_parameter(request, 'max_results', lookup.max_results) self.add_parameter(request, 'include_only_cities', self.build_filter_string(lookup.city_filter)) self.add_parameter(request,", "return [Suggestion(suggestion) for suggestion in suggestion_dictionaries] @staticmethod def add_parameter(request, key,", "','.join(filter_list or []) or None @staticmethod def convert_suggestions(suggestion_dictionaries): return [Suggestion(suggestion)", "[Suggestion(suggestion) for suggestion in suggestion_dictionaries] @staticmethod def add_parameter(request, key, value):", "suggestions def build_request(self, lookup): request = Request() self.add_parameter(request, 'search', lookup.search)", "Sends a Lookup object to the US Autocomplete Pro API", "lookup.search: raise SmartyException('Send() must be passed a Lookup with the", "raise SmartyException('Send() must be passed a Lookup with the search", "Suggestion, geolocation_type class Client: def __init__(self, sender, serializer): \"\"\" It", "self.build_filter_string(lookup.city_filter)) self.add_parameter(request, 'include_only_states', self.build_filter_string(lookup.state_filter)) self.add_parameter(request, 'include_only_zip_codes', self.build_filter_string(lookup.zip_filter)) self.add_parameter(request, 'exclude_states', self.build_filter_string(lookup.exclude))", "self.build_filter_string(lookup.zip_filter)) self.add_parameter(request, 'exclude_states', self.build_filter_string(lookup.exclude)) self.add_parameter(request, 'prefer_cities', self.build_filter_string(lookup.prefer_cities)) self.add_parameter(request, 'prefer_states', self.build_filter_string(lookup.prefer_states))", "raise response.error result = 
self.serializer.deserialize(response.payload) suggestions = self.convert_suggestions(result.get('suggestions') or [])", "in the Lookup's result field. \"\"\" if not lookup or", "the US Autocomplete Pro API and stores the result in", "be passed a Lookup with the search field set.') request", "Pro API and stores the result in the Lookup's result", "self.add_parameter(request, 'include_only_states', self.build_filter_string(lookup.state_filter)) self.add_parameter(request, 'include_only_zip_codes', self.build_filter_string(lookup.zip_filter)) self.add_parameter(request, 'exclude_states', self.build_filter_string(lookup.exclude)) self.add_parameter(request,", "lookup.prefer_ratio) self.add_parameter(request, 'prefer_geolocation', lookup.prefer_geo) self.add_parameter(request, 'selected', lookup.selected) return request @staticmethod", "= self.convert_suggestions(result.get('suggestions') or []) lookup.result = suggestions return suggestions def", "self.add_parameter(request, 'prefer_cities', self.build_filter_string(lookup.prefer_cities)) self.add_parameter(request, 'prefer_states', self.build_filter_string(lookup.prefer_states)) self.add_parameter(request, 'prefer_zip_codes', self.build_filter_string(lookup.prefer_zips)) self.add_parameter(request,", "value): if value and value != 'none': request.parameters[key] = value", "key, value): if value and value != 'none': request.parameters[key] =", "SmartyException from smartystreets_python_sdk.us_autocomplete_pro import Suggestion, geolocation_type class Client: def __init__(self,", "a Lookup object to the US Autocomplete Pro API and", "build_filter_string(filter_list): return ','.join(filter_list or []) or None @staticmethod def convert_suggestions(suggestion_dictionaries):", "if not lookup or not lookup.search: raise SmartyException('Send() must be", "sender, serializer): \"\"\" It is recommended to instantiate this class", "to the US Autocomplete Pro API and stores the result", "'prefer_zip_codes', 
self.build_filter_string(lookup.prefer_zips)) self.add_parameter(request, 'prefer_ratio', lookup.prefer_ratio) self.add_parameter(request, 'prefer_geolocation', lookup.prefer_geo) self.add_parameter(request, 'selected',", "self.sender.send(request) if response.error: raise response.error result = self.serializer.deserialize(response.payload) suggestions =", "\"\"\" It is recommended to instantiate this class using ClientBuilder.build_us_autocomplete_pro_api_client()", "= self.serializer.deserialize(response.payload) suggestions = self.convert_suggestions(result.get('suggestions') or []) lookup.result = suggestions", "= self.sender.send(request) if response.error: raise response.error result = self.serializer.deserialize(response.payload) suggestions", "self.add_parameter(request, 'prefer_geolocation', lookup.prefer_geo) self.add_parameter(request, 'selected', lookup.selected) return request @staticmethod def", "'prefer_geolocation', lookup.prefer_geo) self.add_parameter(request, 'selected', lookup.selected) return request @staticmethod def build_filter_string(filter_list):", "or []) lookup.result = suggestions return suggestions def build_request(self, lookup):", "'include_only_zip_codes', self.build_filter_string(lookup.zip_filter)) self.add_parameter(request, 'exclude_states', self.build_filter_string(lookup.exclude)) self.add_parameter(request, 'prefer_cities', self.build_filter_string(lookup.prefer_cities)) self.add_parameter(request, 'prefer_states',", "self.serializer = serializer def send(self, lookup): \"\"\" Sends a Lookup", "and stores the result in the Lookup's result field. \"\"\"", "lookup.result = suggestions return suggestions def build_request(self, lookup): request =", "ClientBuilder.build_us_autocomplete_pro_api_client() \"\"\" self.sender = sender self.serializer = serializer def send(self,", "result in the Lookup's result field. 
\"\"\" if not lookup", "'prefer_cities', self.build_filter_string(lookup.prefer_cities)) self.add_parameter(request, 'prefer_states', self.build_filter_string(lookup.prefer_states)) self.add_parameter(request, 'prefer_zip_codes', self.build_filter_string(lookup.prefer_zips)) self.add_parameter(request, 'prefer_ratio',", "def build_request(self, lookup): request = Request() self.add_parameter(request, 'search', lookup.search) self.add_parameter(request,", "to instantiate this class using ClientBuilder.build_us_autocomplete_pro_api_client() \"\"\" self.sender = sender", "@staticmethod def build_filter_string(filter_list): return ','.join(filter_list or []) or None @staticmethod", "self.build_filter_string(lookup.state_filter)) self.add_parameter(request, 'include_only_zip_codes', self.build_filter_string(lookup.zip_filter)) self.add_parameter(request, 'exclude_states', self.build_filter_string(lookup.exclude)) self.add_parameter(request, 'prefer_cities', self.build_filter_string(lookup.prefer_cities))", "using ClientBuilder.build_us_autocomplete_pro_api_client() \"\"\" self.sender = sender self.serializer = serializer def", "[]) or None @staticmethod def convert_suggestions(suggestion_dictionaries): return [Suggestion(suggestion) for suggestion", "[]) lookup.result = suggestions return suggestions def build_request(self, lookup): request", "self.build_filter_string(lookup.exclude)) self.add_parameter(request, 'prefer_cities', self.build_filter_string(lookup.prefer_cities)) self.add_parameter(request, 'prefer_states', self.build_filter_string(lookup.prefer_states)) self.add_parameter(request, 'prefer_zip_codes', self.build_filter_string(lookup.prefer_zips))", "serializer): \"\"\" It is recommended to instantiate this class using", "smartystreets_python_sdk import Request from smartystreets_python_sdk.exceptions import SmartyException from smartystreets_python_sdk.us_autocomplete_pro import", "import SmartyException from smartystreets_python_sdk.us_autocomplete_pro 
import Suggestion, geolocation_type class Client: def", "field. \"\"\" if not lookup or not lookup.search: raise SmartyException('Send()", "object to the US Autocomplete Pro API and stores the", "serializer def send(self, lookup): \"\"\" Sends a Lookup object to", "request = Request() self.add_parameter(request, 'search', lookup.search) self.add_parameter(request, 'max_results', lookup.max_results) self.add_parameter(request,", "API and stores the result in the Lookup's result field.", "self.add_parameter(request, 'exclude_states', self.build_filter_string(lookup.exclude)) self.add_parameter(request, 'prefer_cities', self.build_filter_string(lookup.prefer_cities)) self.add_parameter(request, 'prefer_states', self.build_filter_string(lookup.prefer_states)) self.add_parameter(request,", "Client: def __init__(self, sender, serializer): \"\"\" It is recommended to", "from smartystreets_python_sdk.us_autocomplete_pro import Suggestion, geolocation_type class Client: def __init__(self, sender,", "suggestion in suggestion_dictionaries] @staticmethod def add_parameter(request, key, value): if value", "import Suggestion, geolocation_type class Client: def __init__(self, sender, serializer): \"\"\"", "the search field set.') request = self.build_request(lookup) response = self.sender.send(request)", "from smartystreets_python_sdk import Request from smartystreets_python_sdk.exceptions import SmartyException from smartystreets_python_sdk.us_autocomplete_pro", "It is recommended to instantiate this class using ClientBuilder.build_us_autocomplete_pro_api_client() \"\"\"", "lookup): \"\"\" Sends a Lookup object to the US Autocomplete", "set.') request = self.build_request(lookup) response = self.sender.send(request) if response.error: raise", "'max_results', lookup.max_results) self.add_parameter(request, 'include_only_cities', self.build_filter_string(lookup.city_filter)) self.add_parameter(request, 'include_only_states', self.build_filter_string(lookup.state_filter)) 
self.add_parameter(request, 'include_only_zip_codes',", "the Lookup's result field. \"\"\" if not lookup or not", "passed a Lookup with the search field set.') request =", "'search', lookup.search) self.add_parameter(request, 'max_results', lookup.max_results) self.add_parameter(request, 'include_only_cities', self.build_filter_string(lookup.city_filter)) self.add_parameter(request, 'include_only_states',", "None @staticmethod def convert_suggestions(suggestion_dictionaries): return [Suggestion(suggestion) for suggestion in suggestion_dictionaries]", "build_request(self, lookup): request = Request() self.add_parameter(request, 'search', lookup.search) self.add_parameter(request, 'max_results',", "Request from smartystreets_python_sdk.exceptions import SmartyException from smartystreets_python_sdk.us_autocomplete_pro import Suggestion, geolocation_type", "@staticmethod def convert_suggestions(suggestion_dictionaries): return [Suggestion(suggestion) for suggestion in suggestion_dictionaries] @staticmethod", "in suggestion_dictionaries] @staticmethod def add_parameter(request, key, value): if value and", "geolocation_type class Client: def __init__(self, sender, serializer): \"\"\" It is", "result = self.serializer.deserialize(response.payload) suggestions = self.convert_suggestions(result.get('suggestions') or []) lookup.result =", "self.build_filter_string(lookup.prefer_zips)) self.add_parameter(request, 'prefer_ratio', lookup.prefer_ratio) self.add_parameter(request, 'prefer_geolocation', lookup.prefer_geo) self.add_parameter(request, 'selected', lookup.selected)", "def __init__(self, sender, serializer): \"\"\" It is recommended to instantiate", "self.serializer.deserialize(response.payload) suggestions = self.convert_suggestions(result.get('suggestions') or []) lookup.result = suggestions return", "recommended to instantiate this class using ClientBuilder.build_us_autocomplete_pro_api_client() \"\"\" self.sender =", "suggestions return suggestions def 
build_request(self, lookup): request = Request() self.add_parameter(request,", "\"\"\" if not lookup or not lookup.search: raise SmartyException('Send() must", "lookup.selected) return request @staticmethod def build_filter_string(filter_list): return ','.join(filter_list or [])", "search field set.') request = self.build_request(lookup) response = self.sender.send(request) if", "request @staticmethod def build_filter_string(filter_list): return ','.join(filter_list or []) or None", "smartystreets_python_sdk.us_autocomplete_pro import Suggestion, geolocation_type class Client: def __init__(self, sender, serializer):", "Lookup with the search field set.') request = self.build_request(lookup) response", "smartystreets_python_sdk.exceptions import SmartyException from smartystreets_python_sdk.us_autocomplete_pro import Suggestion, geolocation_type class Client:", "'selected', lookup.selected) return request @staticmethod def build_filter_string(filter_list): return ','.join(filter_list or", "self.add_parameter(request, 'prefer_zip_codes', self.build_filter_string(lookup.prefer_zips)) self.add_parameter(request, 'prefer_ratio', lookup.prefer_ratio) self.add_parameter(request, 'prefer_geolocation', lookup.prefer_geo) self.add_parameter(request,", "__init__(self, sender, serializer): \"\"\" It is recommended to instantiate this", "class using ClientBuilder.build_us_autocomplete_pro_api_client() \"\"\" self.sender = sender self.serializer = serializer", "lookup.prefer_geo) self.add_parameter(request, 'selected', lookup.selected) return request @staticmethod def build_filter_string(filter_list): return", "'include_only_cities', self.build_filter_string(lookup.city_filter)) self.add_parameter(request, 'include_only_states', self.build_filter_string(lookup.state_filter)) self.add_parameter(request, 'include_only_zip_codes', self.build_filter_string(lookup.zip_filter)) self.add_parameter(request, 'exclude_states',", "Lookup's result field. 
\"\"\" if not lookup or not lookup.search:", "self.add_parameter(request, 'max_results', lookup.max_results) self.add_parameter(request, 'include_only_cities', self.build_filter_string(lookup.city_filter)) self.add_parameter(request, 'include_only_states', self.build_filter_string(lookup.state_filter)) self.add_parameter(request,", "self.add_parameter(request, 'include_only_cities', self.build_filter_string(lookup.city_filter)) self.add_parameter(request, 'include_only_states', self.build_filter_string(lookup.state_filter)) self.add_parameter(request, 'include_only_zip_codes', self.build_filter_string(lookup.zip_filter)) self.add_parameter(request,", "Request() self.add_parameter(request, 'search', lookup.search) self.add_parameter(request, 'max_results', lookup.max_results) self.add_parameter(request, 'include_only_cities', self.build_filter_string(lookup.city_filter))", "@staticmethod def add_parameter(request, key, value): if value and value !=" ]
[ "def _get_sql_files_from_dir(self, exclude_list=[]): \"\"\"Get all script files from directory\"\"\" _exclude_list", "patches table\"\"\" patches = self.get_pending_patches() for patch in patches: self.logger.info('Add", "if parser_args.debug else logging.INFO) logger.addHandler(ch) # database handle sqlvc =", "e.message or e if e.clsException.InnerException is not None and e.clsException.InnerException.InnerException", "[_patch_history] (id int not null identity(1, 1), name varchar(100) not", "process\"\"\" patches = self.get_pending_patches() self.logger.debug('Files to execute %s' % (patches,))", "make sure you run this script using IronPython') exit(2) import", "self.connection = Common.ServerConnection(LoginSecure=is_local_login, ServerInstance=url.hostname, DatabaseName=url.path.replace('/', '')) if not is_local_login: self.connection.Login", "return patches def execute_file(self, file): \"\"\"Executes file against database in", "= '\\033[1m' def __init__(self, connection_string, patch_dir='.', exclude_pattern=None, logger=None, stop_on_error=False, noexecute=False,", "e: self.connection.RollBackTransaction() self.logger.error('Exception on %s' % (file,)) message = e.message", "@staticmethod def _create_patch_table_if_not_exists(database): \"\"\"Create patch table in database if not", "format, required') parser.add_argument('--directory', '-d', dest='directory', action='store', default='.', help='directory with patch", "so \"PatchName.sql\" and \"patchname.sql\" is the same') parser.add_argument('--debug', action='store_true', dest='debug',", "table in database if not exists\"\"\" sql = 'select *", "' + __version__) return parser if __name__ == '__main__': #", "else [f.lower() for f in exclude_list] prevdir = os.getcwd() os.chdir(self.patch_dir)", "word') exists = self._create_patch_table_if_not_exists(self.database) if not exists: self.logger.info('[%s] created _patch_history", "patches def execute_file(self, file): \"\"\"Executes file against database in 
transaction,", "script files from directory\"\"\" _exclude_list = set(exclude_list) if not self.case_insensitive", "def execute_file(self, file): \"\"\"Executes file against database in transaction, returns", "without folder paths') parser.add_argument('--case-insensitive', '-ci', action='store_true', dest='case_insensitive', default=False, help='use case", "False return ret def put_patch(self, file): \"\"\"Write record that file", "connection_string: Connection string in rfc1738 url format :param patch_dir: Patch", "\"D:/1/project/patch\"''') parser.add_argument('--connection', '-c', required=True, dest='connection', action='store', help='connection string in rfc1738", "def get_pending_patches(self): applied_patches = self.get_applied_patches() if self.record_files_only: applied_patches = [os.path.basename(f)", "pass class MsSqlVersion(object): \"\"\" SQL Server patch migration class. \"\"\"", "directory with .sql files :param exclude_pattern: String with regular expression", "to compare patch files so \"PatchName.sql\" and \"patchname.sql\" is the", "ch = logging.StreamHandler() ch.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')) logger.setLevel(logging.DEBUG if parser_args.debug else", "raise Exception('Patch folder does not exist') if 'mssql' not in", "self._get_sql_files_from_dir(applied_patches) patches.sort() return patches def execute_file(self, file): \"\"\"Executes file against", "not _file.lower().endswith('.sql') or (self.exclude_pattern and re.search(self.exclude_pattern, file))): continue sql_files.append(file) os.chdir(prevdir)", "in files: file = os.path.normpath(os.path.join(root, file)) _file = file if", "fill(self): \"\"\"Skip scripts execution but add them to patches table\"\"\"", "patch table without folder paths') parser.add_argument('--case-insensitive', '-ci', action='store_true', dest='case_insensitive', default=False,", "os import re import sys import urlparse try: import clr", "try: import clr except ImportError: 
print('Cannot import crl module, make", "stored to patch table without folder paths \"\"\" url =", "parser.add_argument('--noexecute', '-n', action='store_true', dest='noexecute', default=False, help='displays pending script files with", "sql_files @staticmethod def _create_patch_table_if_not_exists(database): \"\"\"Create patch table in database if", "does not exist') if 'mssql' not in connection_string: raise Exception('Wrong", "and fills patch table') parser.add_argument('--stop-on-error', '-soe', action='store_true', dest='stop_on_error', default=False, help='stops", "try: full_name = os.path.join(os.path.normpath(self.patch_dir), file) with io.open(full_name, 'r', encoding='utf8') as", "is to continue :param case_insensitive: Use case insensitive to compare", "if not exists\"\"\" sql = 'select * from sys.objects where", "_file = _file.lower() if self.record_files_only: _file = os.path.basename(_file) if (_file", "to patch table without folder paths \"\"\" url = urlparse.urlparse(connection_string)", "row in rows]) def _get_sql_files_from_dir(self, exclude_list=[]): \"\"\"Get all script files", "for file in files: file = os.path.normpath(os.path.join(root, file)) _file =", "get_cmd_line_parser(): \"\"\"Get initialized argparse.ArgumentParser object\"\"\" parser = argparse.ArgumentParser( description='MSSQL database", "stored to patch table without folder paths') parser.add_argument('--case-insensitive', '-ci', action='store_true',", "is not None and e.clsException.InnerException.InnerException is not None: message +=", "% (file,)) message = e.message or e if e.clsException.InnerException is", "for logging :param stop_on_error: Stop execution on error, default behavior", "parser_args = parser.parse_args() if parser_args.connection is None or parser_args.directory is", "(patch,)) self.put_patch(patch) def get_pending_patches(self): applied_patches = self.get_applied_patches() if self.record_files_only: applied_patches", "= self.get_pending_patches() 
self.logger.debug('Files to execute %s' % (patches,)) for patch", "insensitive to compare patch files so \"PatchName.sql\" and \"patchname.sql\" is", "url format, required') parser.add_argument('--directory', '-d', dest='directory', action='store', default='.', help='directory with", "default=False, help='only file names will be stored to patch table", "files with no execution and fills patch table') parser.add_argument('--stop-on-error', '-soe',", "+ 'Execution stopped. Please fix errors and try again.' +", "Exception('Patch folder does not exist') if 'mssql' not in connection_string:", "string in rfc1738 url format, required') parser.add_argument('--directory', '-d', dest='directory', action='store',", "import Microsoft.SqlServer.Management.Smo as Smo import Microsoft.SqlServer.Management.Common as Common __author__ =", "stopped. Please fix errors and try again.' + MsSqlVersion.bcolors.ENDC) raise", "format :param patch_dir: Patch directory with .sql files :param exclude_pattern:", "database.ExecuteWithResults(sql).Tables[0].Rows.Count > 0 if not exists: sql = \"\"\" create", "mssql word') exists = self._create_patch_table_if_not_exists(self.database) if not exists: self.logger.info('[%s] created", "connection_string, patch_dir='.', exclude_pattern=None, logger=None, stop_on_error=False, noexecute=False, case_insensitive=False, record_files_only=False): \"\"\" Initialize", "sql_files = [] for root, dirs, files in os.walk('.'): for", "= '\\033[91m' ENDC = '\\033[0m' BOLD = '\\033[1m' def __init__(self,", "import re import sys import urlparse try: import clr except", "patch in patches: success = self.execute_file(patch) if success: self.executed_count +=", "string in rfc1738 url format :param patch_dir: Patch directory with", "parser.print_help() exit(1) # logging logger = logging.getLogger('mssql') if parser_args.log: fh", "% (self.database.Name, full_name, message)) ret = False return ret def", "= '\\033[94m' OKGREEN = '\\033[92m' WARNING = '\\033[93m' FAIL =", 
"be stored to patch table without folder paths \"\"\" url", "paths') parser.add_argument('--case-insensitive', '-ci', action='store_true', dest='case_insensitive', default=False, help='use case insensitive to", "MsSqlVersion.bcolors.ENDC) raise ScriptExecutionError() self.logger.info('[%s] Executed %d patch(-es)' % (self.database.Name, self.executed_count))", "sys.objects where object_id = object_id(\\'_patch_history\\') AND type in (\\'U\\');' exists", "pending script files with no execution') parser.add_argument('--noexecute-fill', '-nf', action='store_true', dest='noexecute_fill',", "case_insensitive=parser_args.case_insensitive, record_files_only=parser_args.record_files_only, logger=logger) if parser_args.noexecute: for patch in sqlvc.get_pending_patches(): logger.info('", "exists = database.ExecuteWithResults(sql).Tables[0].Rows.Count > 0 if not exists: sql =", "__license__ = 'BSD' __version__ = '1.4.5' __all__ = ['MsSqlVersion'] class", "argparse.ArgumentParser( description='MSSQL database patch history tool', formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Example: %(prog)s -c", "_exclude_list = set(exclude_list) if not self.case_insensitive else [f.lower() for f", "sql = \"\"\" create table [_patch_history] (id int not null", "self.record_files_only = record_files_only self.executed_count = 0 self.logger = logging.NullHandler() if", "if self.record_files_only: applied_patches = [os.path.basename(f) for f in applied_patches] patches", "encoding='utf8') as sql_file: sql = sql_file.read() self.logger.info('[%s] Executing %s...' 
%", "parser.add_argument('--record-files-only', '-rfo', action='store_true', dest='record_files_only', default=False, help='only file names will be", "[] for root, dirs, files in os.walk('.'): for file in", "_exclude_list or not _file.lower().endswith('.sql') or (self.exclude_pattern and re.search(self.exclude_pattern, file))): continue", "bcolors: OKBLUE = '\\033[94m' OKGREEN = '\\033[92m' WARNING = '\\033[93m'", "is the same') parser.add_argument('--debug', action='store_true', dest='debug', default=False, help='enables debug output')", "'__main__': # parser parser = get_cmd_line_parser() parser_args = parser.parse_args() if", "in database if not exists\"\"\" sql = 'select * from", "root, dirs, files in os.walk('.'): for file in files: file", "parser_args.debug else logging.INFO) logger.addHandler(ch) # database handle sqlvc = MsSqlVersion(parser_args.connection,", "\"\"\"Skip scripts execution but add them to patches table\"\"\" patches", "MsSqlVersion(parser_args.connection, parser_args.directory, exclude_pattern=parser_args.exclude_pattern, stop_on_error=parser_args.stop_on_error, case_insensitive=parser_args.case_insensitive, record_files_only=parser_args.record_files_only, logger=logger) if parser_args.noexecute: for", "patch_dir: Patch directory with .sql files :param exclude_pattern: String with", "class ScriptExecutionError(Exception): pass class MsSqlVersion(object): \"\"\" SQL Server patch migration", "e.clsException.InnerException is not None and e.clsException.InnerException.InnerException is not None: message", "exists: sql = \"\"\" create table [_patch_history] (id int not", "Logger that is used for logging :param stop_on_error: Stop execution", "patch files') parser.add_argument('--log', '-l', dest='log', action='store', help='log file') parser.add_argument('--noexecute', '-n',", "self.connection.Login = url.username self.connection.Password = <PASSWORD> self.server = Smo.Server(self.connection) self.database", "as e: 
self.connection.RollBackTransaction() self.logger.error('Exception on %s' % (file,)) message =", "+= ' ' + e.clsException.InnerException.InnerException.Message self.logger.error('[%s] %s (%s)' % (self.database.Name,", "%(message)s')) logger.setLevel(logging.DEBUG if parser_args.debug else logging.INFO) logger.addHandler(ch) # database handle", "is_local_login = not url.username self.connection = Common.ServerConnection(LoginSecure=is_local_login, ServerInstance=url.hostname, DatabaseName=url.path.replace('/', ''))", "default behavior is to continue :param case_insensitive: Use case insensitive", "'Copyright (c) 2015-2016, Saritasa' __license__ = 'BSD' __version__ = '1.4.5'", "import os import re import sys import urlparse try: import", "MsSqlVersion(object): \"\"\" SQL Server patch migration class. \"\"\" class bcolors:", ":param stop_on_error: Stop execution on error, default behavior is to", "(patches,)) for patch in patches: success = self.execute_file(patch) if success:", "(self.database.Name, self.executed_count)) def fill(self): \"\"\"Skip scripts execution but add them", "logging.INFO) logger.addHandler(ch) # database handle sqlvc = MsSqlVersion(parser_args.connection, parser_args.directory, exclude_pattern=parser_args.exclude_pattern,", "parser.add_argument('--version', '-v', action='version', version='%(prog)s ' + __version__) return parser if", "= os.path.join(os.path.normpath(self.patch_dir), file) with io.open(full_name, 'r', encoding='utf8') as sql_file: sql", "success = self.execute_file(patch) if success: self.executed_count += 1 self.put_patch(patch) if", "default='.', help='directory with patch files') parser.add_argument('--log', '-l', dest='log', action='store', help='log", "record_files_only self.executed_count = 0 self.logger = logging.NullHandler() if not logger", "def update(self): \"\"\"Executes database update process\"\"\" patches = self.get_pending_patches() self.logger.debug('Files", "file against database in transaction, returns True if 
success\"\"\" ret", "= set(exclude_list) if not self.case_insensitive else [f.lower() for f in", "%s (%s)' % (self.database.Name, full_name, message)) ret = False return", "SQL Server patch migration class. \"\"\" class bcolors: OKBLUE =", "database if not exists\"\"\" sql = 'select * from sys.objects", "dest='stop_on_error', default=False, help='stops execution if any script fails') parser.add_argument('--exclude-pattern', '-ep',", "script fails') parser.add_argument('--exclude-pattern', '-ep', dest='exclude_pattern', help='skips files match to regular", "(self.database.Name, full_name, message)) ret = False return ret def put_patch(self,", "executed\"\"\" now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') if self.record_files_only: file = os.path.basename(file)", "action='store_true', dest='stop_on_error', default=False, help='stops execution if any script fails') parser.add_argument('--exclude-pattern',", "+ __version__) return parser if __name__ == '__main__': # parser", "' ' + e.clsException.InnerException.InnerException.Message self.logger.error('[%s] %s (%s)' % (self.database.Name, full_name,", "insensitive to compare patch files :param record_files_only: Only file names", "files with no execution') parser.add_argument('--noexecute-fill', '-nf', action='store_true', dest='noexecute_fill', default=False, help='displays", "import urlparse try: import clr except ImportError: print('Cannot import crl", "self.get_applied_patches() if self.record_files_only: applied_patches = [os.path.basename(f) for f in applied_patches]", "varchar(100) not null, applied_at datetime not null); alter table [_patch_history]", ":param patch_dir: Patch directory with .sql files :param exclude_pattern: String", "required') parser.add_argument('--directory', '-d', dest='directory', action='store', default='.', help='directory with patch files')", "= patch_dir self.stop_on_error = stop_on_error self.case_insensitive = case_insensitive self.record_files_only =", "logger else 
logger if not os.path.exists(patch_dir): raise Exception('Patch folder does", "return sql_files @staticmethod def _create_patch_table_if_not_exists(database): \"\"\"Create patch table in database", "% (patches,)) for patch in patches: success = self.execute_file(patch) if", "table [_patch_history] add constraint _patch_history_PK primary key clustered (id); \"\"\"", "self.logger.critical(MsSqlVersion.bcolors.WARNING + 'Execution stopped. Please fix errors and try again.'", "[_patch_history] (name, applied_at) values(\\'%s\\', \\'%s\\');' % (file, now) self.database.ExecuteNonQuery(sql) def", "files: file = os.path.normpath(os.path.join(root, file)) _file = file if self.case_insensitive:", "the BSD license. See LICENSE file in the project root", "migration class. \"\"\" class bcolors: OKBLUE = '\\033[94m' OKGREEN =", "root for full license information. \"\"\" import argparse import datetime", "execute %s' % (patches,)) for patch in patches: success =", "exclude_list=[]): \"\"\"Get all script files from directory\"\"\" _exclude_list = set(exclude_list)", "sqlvc.get_pending_patches(): logger.info(' ' + patch) elif parser_args.noexecute_fill: sqlvc.fill() else: sqlvc.update()", "record_files_only: Only file names will be stored to patch table", "%s' % (patch,)) self.put_patch(patch) def get_pending_patches(self): applied_patches = self.get_applied_patches() if", "__author__ = '<NAME>' __copyright__ = 'Copyright (c) 2015-2016, Saritasa' __license__", "logging.getLogger('mssql') if parser_args.log: fh = logging.FileHandler(parser_args.log) fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')) logger.addHandler(fh)", "record_files_only=False): \"\"\" Initialize instance with connection and database objects. 
:param", "= '<NAME>' __copyright__ = 'Copyright (c) 2015-2016, Saritasa' __license__ =", "if 'mssql' not in connection_string: raise Exception('Wrong connection string, it", "error, default behavior is to continue :param case_insensitive: Use case", "set(exclude_list) if not self.case_insensitive else [f.lower() for f in exclude_list]", "ServerInstance=url.hostname, DatabaseName=url.path.replace('/', '')) if not is_local_login: self.connection.Login = url.username self.connection.Password", "Smo.Server(self.connection) self.database = self.server.Databases[self.connection.DatabaseName] self.server.ConnectionContext.ConnectTimeout = 90 self.exclude_pattern = exclude_pattern", "(file, now) self.database.ExecuteNonQuery(sql) def get_applied_patches(self): rows = self.database.ExecuteWithResults('select name from", "where object_id = object_id(\\'_patch_history\\') AND type in (\\'U\\');' exists =", "patches = self.get_pending_patches() for patch in patches: self.logger.info('Add file %s'", "os.chdir(prevdir) return sql_files @staticmethod def _create_patch_table_if_not_exists(database): \"\"\"Create patch table in", "message += ' ' + e.clsException.InnerException.InnerException.Message self.logger.error('[%s] %s (%s)' %", "= logging.StreamHandler() ch.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')) logger.setLevel(logging.DEBUG if parser_args.debug else logging.INFO)", "case_insensitive: Use case insensitive to compare patch files :param record_files_only:", "paths \"\"\" url = urlparse.urlparse(connection_string) is_local_login = not url.username self.connection", "self.server.Databases[self.connection.DatabaseName] self.server.ConnectionContext.ConnectTimeout = 90 self.exclude_pattern = exclude_pattern self.patch_dir = patch_dir", "= self.database.ExecuteWithResults('select name from [_patch_history];').Tables[0].Rows return set([row['name'] for row in", "'1.4.5' __all__ = ['MsSqlVersion'] class ScriptExecutionError(Exception): pass class 
MsSqlVersion(object): \"\"\"", "= object_id(\\'_patch_history\\') AND type in (\\'U\\');' exists = database.ExecuteWithResults(sql).Tables[0].Rows.Count >", "(id); \"\"\" database.ExecuteNonQuery(sql) return exists def get_cmd_line_parser(): \"\"\"Get initialized argparse.ArgumentParser", "import clr except ImportError: print('Cannot import crl module, make sure", "name varchar(100) not null, applied_at datetime not null); alter table", "applied_patches = self.get_applied_patches() if self.record_files_only: applied_patches = [os.path.basename(f) for f", "Please fix errors and try again.' + MsSqlVersion.bcolors.ENDC) raise ScriptExecutionError()", "been executed\"\"\" now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') if self.record_files_only: file =", "_file = os.path.basename(_file) if (_file in _exclude_list or not _file.lower().endswith('.sql')", "Connection string in rfc1738 url format :param patch_dir: Patch directory", "'mssql' not in connection_string: raise Exception('Wrong connection string, it should", "files should match :param logger: Logger that is used for", "fix errors and try again.' 
+ MsSqlVersion.bcolors.ENDC) raise ScriptExecutionError() self.logger.info('[%s]", "%d patch(-es)' % (self.database.Name, self.executed_count)) def fill(self): \"\"\"Skip scripts execution", "if self.record_files_only: file = os.path.basename(file) sql = 'insert [_patch_history] (name,", "os.path.exists(patch_dir): raise Exception('Patch folder does not exist') if 'mssql' not", "initialized argparse.ArgumentParser object\"\"\" parser = argparse.ArgumentParser( description='MSSQL database patch history", "on %s' % (file,)) message = e.message or e if", "True try: full_name = os.path.join(os.path.normpath(self.patch_dir), file) with io.open(full_name, 'r', encoding='utf8')", "the same') parser.add_argument('--debug', action='store_true', dest='debug', default=False, help='enables debug output') parser.add_argument('--version',", "= record_files_only self.executed_count = 0 self.logger = logging.NullHandler() if not", "'-nf', action='store_true', dest='noexecute_fill', default=False, help='displays pending script files with no", "parser_args.directory is None: parser.print_help() exit(1) # logging logger = logging.getLogger('mssql')", "System clr.AddReference('Microsoft.SqlServer.Smo') clr.AddReference('Microsoft.SqlServer.SqlEnum') clr.AddReference('Microsoft.SqlServer.ConnectionInfo') import Microsoft.SqlServer.Management.Smo as Smo import Microsoft.SqlServer.Management.Common", "if not logger else logger if not os.path.exists(patch_dir): raise Exception('Patch", "self.connection.BeginTransaction() self.database.ExecuteNonQuery(sql) self.connection.CommitTransaction() except Exception as e: self.connection.RollBackTransaction() self.logger.error('Exception on", "is_local_login: self.connection.Login = url.username self.connection.Password = <PASSWORD> self.server = Smo.Server(self.connection)", ":param connection_string: Connection string in rfc1738 url format :param patch_dir:", "default=False, help='use case insensitive to compare patch files so 
\"PatchName.sql\"", "argparse import datetime import io import logging import os import", "connection and database objects. :param connection_string: Connection string in rfc1738", "from directory\"\"\" _exclude_list = set(exclude_list) if not self.case_insensitive else [f.lower()", "= [] for root, dirs, files in os.walk('.'): for file", "(id int not null identity(1, 1), name varchar(100) not null,", "(c) 2015-2016, Saritasa' __license__ = 'BSD' __version__ = '1.4.5' __all__", "not success and self.stop_on_error: self.logger.critical(MsSqlVersion.bcolors.WARNING + 'Execution stopped. Please fix", "update process\"\"\" patches = self.get_pending_patches() self.logger.debug('Files to execute %s' %", "ret = False return ret def put_patch(self, file): \"\"\"Write record", "file') parser.add_argument('--noexecute', '-n', action='store_true', dest='noexecute', default=False, help='displays pending script files", "logger.setLevel(logging.DEBUG if parser_args.debug else logging.INFO) logger.addHandler(ch) # database handle sqlvc", "BSD license. See LICENSE file in the project root for", "'\\033[93m' FAIL = '\\033[91m' ENDC = '\\033[0m' BOLD = '\\033[1m'", "patch in sqlvc.get_pending_patches(): logger.info(' ' + patch) elif parser_args.noexecute_fill: sqlvc.fill()", "continue :param case_insensitive: Use case insensitive to compare patch files", "try again.' 
+ MsSqlVersion.bcolors.ENDC) raise ScriptExecutionError() self.logger.info('[%s] Executed %d patch(-es)'", "folder paths \"\"\" url = urlparse.urlparse(connection_string) is_local_login = not url.username", "= os.path.basename(file) sql = 'insert [_patch_history] (name, applied_at) values(\\'%s\\', \\'%s\\');'", "'-l', dest='log', action='store', help='log file') parser.add_argument('--noexecute', '-n', action='store_true', dest='noexecute', default=False,", "'-d', dest='directory', action='store', default='.', help='directory with patch files') parser.add_argument('--log', '-l',", "-*- coding: utf-8 -*- \"\"\" mssqlvc ~~~~~~~ Database version control", "in sqlvc.get_pending_patches(): logger.info(' ' + patch) elif parser_args.noexecute_fill: sqlvc.fill() else:", "errors and try again.' + MsSqlVersion.bcolors.ENDC) raise ScriptExecutionError() self.logger.info('[%s] Executed", "set([row['name'] for row in rows]) def _get_sql_files_from_dir(self, exclude_list=[]): \"\"\"Get all", "logger=None, stop_on_error=False, noexecute=False, case_insensitive=False, record_files_only=False): \"\"\" Initialize instance with connection", "See LICENSE file in the project root for full license", "used for logging :param stop_on_error: Stop execution on error, default", "raise ScriptExecutionError() self.logger.info('[%s] Executed %d patch(-es)' % (self.database.Name, self.executed_count)) def", "ret def put_patch(self, file): \"\"\"Write record that file has been", "module, make sure you run this script using IronPython') exit(2)", "this script using IronPython') exit(2) import System clr.AddReference('Microsoft.SqlServer.Smo') clr.AddReference('Microsoft.SqlServer.SqlEnum') clr.AddReference('Microsoft.SqlServer.ConnectionInfo')", "sql_file.read() self.logger.info('[%s] Executing %s...' 
% (self.database.Name, file)) self.connection.BeginTransaction() self.database.ExecuteNonQuery(sql) self.connection.CommitTransaction()", "scripts execution but add them to patches table\"\"\" patches =", "in connection_string: raise Exception('Wrong connection string, it should contain mssql", "[_patch_history] add constraint _patch_history_PK primary key clustered (id); \"\"\" database.ExecuteNonQuery(sql)", "= not url.username self.connection = Common.ServerConnection(LoginSecure=is_local_login, ServerInstance=url.hostname, DatabaseName=url.path.replace('/', '')) if", "self.executed_count += 1 self.put_patch(patch) if not success and self.stop_on_error: self.logger.critical(MsSqlVersion.bcolors.WARNING", "parser.add_argument('--directory', '-d', dest='directory', action='store', default='.', help='directory with patch files') parser.add_argument('--log',", "logger = logging.getLogger('mssql') if parser_args.log: fh = logging.FileHandler(parser_args.log) fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s]", "add them to patches table\"\"\" patches = self.get_pending_patches() for patch", "null, applied_at datetime not null); alter table [_patch_history] add constraint", "(name, applied_at) values(\\'%s\\', \\'%s\\');' % (file, now) self.database.ExecuteNonQuery(sql) def get_applied_patches(self):", "= datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') if self.record_files_only: file = os.path.basename(file) sql =", "exclude_list] prevdir = os.getcwd() os.chdir(self.patch_dir) sql_files = [] for root,", "rfc1738 url format :param patch_dir: Patch directory with .sql files", "get_cmd_line_parser() parser_args = parser.parse_args() if parser_args.connection is None or parser_args.directory", "rows = self.database.ExecuteWithResults('select name from [_patch_history];').Tables[0].Rows return set([row['name'] for row", "default=False, help='displays pending script files with no execution and fills", "dest='case_insensitive', default=False, help='use case 
insensitive to compare patch files so", "logger if not os.path.exists(patch_dir): raise Exception('Patch folder does not exist')", "default=False, help='displays pending script files with no execution') parser.add_argument('--noexecute-fill', '-nf',", "self.executed_count)) def fill(self): \"\"\"Skip scripts execution but add them to", "urlparse.urlparse(connection_string) is_local_login = not url.username self.connection = Common.ServerConnection(LoginSecure=is_local_login, ServerInstance=url.hostname, DatabaseName=url.path.replace('/',", "no execution') parser.add_argument('--noexecute-fill', '-nf', action='store_true', dest='noexecute_fill', default=False, help='displays pending script", "> 0 if not exists: sql = \"\"\" create table", "f in applied_patches] patches = self._get_sql_files_from_dir(applied_patches) patches.sort() return patches def", "but add them to patches table\"\"\" patches = self.get_pending_patches() for", "[_patch_history];').Tables[0].Rows return set([row['name'] for row in rows]) def _get_sql_files_from_dir(self, exclude_list=[]):", "(self.exclude_pattern and re.search(self.exclude_pattern, file))): continue sql_files.append(file) os.chdir(prevdir) return sql_files @staticmethod", "OKGREEN = '\\033[92m' WARNING = '\\033[93m' FAIL = '\\033[91m' ENDC", "self.logger.info('Add file %s' % (patch,)) self.put_patch(patch) def get_pending_patches(self): applied_patches =", "not None and e.clsException.InnerException.InnerException is not None: message += '", "Executed %d patch(-es)' % (self.database.Name, self.executed_count)) def fill(self): \"\"\"Skip scripts", "for f in exclude_list] prevdir = os.getcwd() os.chdir(self.patch_dir) sql_files =", "should contain mssql word') exists = self._create_patch_table_if_not_exists(self.database) if not exists:", "f in exclude_list] prevdir = os.getcwd() os.chdir(self.patch_dir) sql_files = []", "self.server.ConnectionContext.ConnectTimeout = 90 self.exclude_pattern = exclude_pattern self.patch_dir = 
patch_dir self.stop_on_error", "os.path.basename(_file) if (_file in _exclude_list or not _file.lower().endswith('.sql') or (self.exclude_pattern", "stop_on_error self.case_insensitive = case_insensitive self.record_files_only = record_files_only self.executed_count = 0", "[%(levelname)s] %(message)s')) logger.setLevel(logging.DEBUG if parser_args.debug else logging.INFO) logger.addHandler(ch) # database", "folder does not exist') if 'mssql' not in connection_string: raise", "= <PASSWORD> self.server = Smo.Server(self.connection) self.database = self.server.Databases[self.connection.DatabaseName] self.server.ConnectionContext.ConnectTimeout =", "'<NAME>' __copyright__ = 'Copyright (c) 2015-2016, Saritasa' __license__ = 'BSD'", "datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') if self.record_files_only: file = os.path.basename(file) sql = 'insert", "def _create_patch_table_if_not_exists(database): \"\"\"Create patch table in database if not exists\"\"\"", "import datetime import io import logging import os import re", "'-c', required=True, dest='connection', action='store', help='connection string in rfc1738 url format,", "self.logger.error('[%s] %s (%s)' % (self.database.Name, full_name, message)) ret = False", "exists = self._create_patch_table_if_not_exists(self.database) if not exists: self.logger.info('[%s] created _patch_history table'", "Database version control utility for Microsoft SQL Server. See README.md", "history tool', formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Example: %(prog)s -c \"mssql://sa:123@host\\instance/database\" -d \"D:/1/project/patch\"''') parser.add_argument('--connection',", "if (_file in _exclude_list or not _file.lower().endswith('.sql') or (self.exclude_pattern and", "instance with connection and database objects. 
:param connection_string: Connection string", "if not exists: self.logger.info('[%s] created _patch_history table' % (self.database.Name,)) def", "self.case_insensitive else [f.lower() for f in exclude_list] prevdir = os.getcwd()", "parser.add_argument('--log', '-l', dest='log', action='store', help='log file') parser.add_argument('--noexecute', '-n', action='store_true', dest='noexecute',", "self.get_pending_patches() self.logger.debug('Files to execute %s' % (patches,)) for patch in", "e.clsException.InnerException.InnerException is not None: message += ' ' + e.clsException.InnerException.InnerException.Message", "= '1.4.5' __all__ = ['MsSqlVersion'] class ScriptExecutionError(Exception): pass class MsSqlVersion(object):", "help='connection string in rfc1738 url format, required') parser.add_argument('--directory', '-d', dest='directory',", "io import logging import os import re import sys import", "parser_args.noexecute: for patch in sqlvc.get_pending_patches(): logger.info(' ' + patch) elif", "help='enables debug output') parser.add_argument('--version', '-v', action='version', version='%(prog)s ' + __version__)", "= case_insensitive self.record_files_only = record_files_only self.executed_count = 0 self.logger =", "not url.username self.connection = Common.ServerConnection(LoginSecure=is_local_login, ServerInstance=url.hostname, DatabaseName=url.path.replace('/', '')) if not", "ScriptExecutionError(Exception): pass class MsSqlVersion(object): \"\"\" SQL Server patch migration class.", "if self.server: self.server.ConnectionContext.Disconnect() def update(self): \"\"\"Executes database update process\"\"\" patches", "= urlparse.urlparse(connection_string) is_local_login = not url.username self.connection = Common.ServerConnection(LoginSecure=is_local_login, ServerInstance=url.hostname,", "version control utility for Microsoft SQL Server. See README.md for", "= sql_file.read() self.logger.info('[%s] Executing %s...' 
% (self.database.Name, file)) self.connection.BeginTransaction() self.database.ExecuteNonQuery(sql)", "= 'Copyright (c) 2015-2016, Saritasa' __license__ = 'BSD' __version__ =", "message = e.message or e if e.clsException.InnerException is not None", "constraint _patch_history_PK primary key clustered (id); \"\"\" database.ExecuteNonQuery(sql) return exists", "ScriptExecutionError() self.logger.info('[%s] Executed %d patch(-es)' % (self.database.Name, self.executed_count)) def fill(self):", "patch table in database if not exists\"\"\" sql = 'select", "# logging logger = logging.getLogger('mssql') if parser_args.log: fh = logging.FileHandler(parser_args.log)", "for patch in patches: self.logger.info('Add file %s' % (patch,)) self.put_patch(patch)", "under the BSD license. See LICENSE file in the project", "= Common.ServerConnection(LoginSecure=is_local_login, ServerInstance=url.hostname, DatabaseName=url.path.replace('/', '')) if not is_local_login: self.connection.Login =", "def get_cmd_line_parser(): \"\"\"Get initialized argparse.ArgumentParser object\"\"\" parser = argparse.ArgumentParser( description='MSSQL", "pending script files with no execution and fills patch table')", "in patches: success = self.execute_file(patch) if success: self.executed_count += 1", "= self.server.Databases[self.connection.DatabaseName] self.server.ConnectionContext.ConnectTimeout = 90 self.exclude_pattern = exclude_pattern self.patch_dir =", "patches = self._get_sql_files_from_dir(applied_patches) patches.sort() return patches def execute_file(self, file): \"\"\"Executes", "Microsoft.SqlServer.Management.Smo as Smo import Microsoft.SqlServer.Management.Common as Common __author__ = '<NAME>'", "'\\033[0m' BOLD = '\\033[1m' def __init__(self, connection_string, patch_dir='.', exclude_pattern=None, logger=None,", "script using IronPython') exit(2) import System clr.AddReference('Microsoft.SqlServer.Smo') clr.AddReference('Microsoft.SqlServer.SqlEnum') 
clr.AddReference('Microsoft.SqlServer.ConnectionInfo') import", "_file.lower() if self.record_files_only: _file = os.path.basename(_file) if (_file in _exclude_list", "%(message)s')) logger.addHandler(fh) ch = logging.StreamHandler() ch.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')) logger.setLevel(logging.DEBUG if", "README.md for more information. Licensed under the BSD license. See", "transaction, returns True if success\"\"\" ret = True try: full_name", "file in files: file = os.path.normpath(os.path.join(root, file)) _file = file", "Initialize instance with connection and database objects. :param connection_string: Connection", "if not self.case_insensitive else [f.lower() for f in exclude_list] prevdir", "_file = file if self.case_insensitive: _file = _file.lower() if self.record_files_only:", "exclude_pattern: String with regular expression the patch files should match", "execution') parser.add_argument('--noexecute-fill', '-nf', action='store_true', dest='noexecute_fill', default=False, help='displays pending script files", "patch files should match :param logger: Logger that is used", "\"\"\" class bcolors: OKBLUE = '\\033[94m' OKGREEN = '\\033[92m' WARNING", "return ret def put_patch(self, file): \"\"\"Write record that file has", "action='store', default='.', help='directory with patch files') parser.add_argument('--log', '-l', dest='log', action='store',", "table [_patch_history] (id int not null identity(1, 1), name varchar(100)", "'-rfo', action='store_true', dest='record_files_only', default=False, help='only file names will be stored", "to patch table without folder paths') parser.add_argument('--case-insensitive', '-ci', action='store_true', dest='case_insensitive',", "'\\033[1m' def __init__(self, connection_string, patch_dir='.', exclude_pattern=None, logger=None, stop_on_error=False, noexecute=False, case_insensitive=False,", "self.database.ExecuteWithResults('select name from [_patch_history];').Tables[0].Rows return 
set([row['name'] for row in rows])", "self.logger.error('Exception on %s' % (file,)) message = e.message or e", "not self.case_insensitive else [f.lower() for f in exclude_list] prevdir =", "logging import os import re import sys import urlparse try:", "case_insensitive=False, record_files_only=False): \"\"\" Initialize instance with connection and database objects.", "import argparse import datetime import io import logging import os", "\"\"\"Get all script files from directory\"\"\" _exclude_list = set(exclude_list) if", "dirs, files in os.walk('.'): for file in files: file =", "action='store', help='connection string in rfc1738 url format, required') parser.add_argument('--directory', '-d',", "self.server: self.server.ConnectionContext.Disconnect() def update(self): \"\"\"Executes database update process\"\"\" patches =", "'-ep', dest='exclude_pattern', help='skips files match to regular expression') parser.add_argument('--record-files-only', '-rfo',", "primary key clustered (id); \"\"\" database.ExecuteNonQuery(sql) return exists def get_cmd_line_parser():", "in rows]) def _get_sql_files_from_dir(self, exclude_list=[]): \"\"\"Get all script files from", "<PASSWORD> self.server = Smo.Server(self.connection) self.database = self.server.Databases[self.connection.DatabaseName] self.server.ConnectionContext.ConnectTimeout = 90", "WARNING = '\\033[93m' FAIL = '\\033[91m' ENDC = '\\033[0m' BOLD", "patch in patches: self.logger.info('Add file %s' % (patch,)) self.put_patch(patch) def", "== '__main__': # parser parser = get_cmd_line_parser() parser_args = parser.parse_args()", "1), name varchar(100) not null, applied_at datetime not null); alter", "for more information. Licensed under the BSD license. 
See LICENSE", "ret = True try: full_name = os.path.join(os.path.normpath(self.patch_dir), file) with io.open(full_name,", "files :param exclude_pattern: String with regular expression the patch files", "parser = argparse.ArgumentParser( description='MSSQL database patch history tool', formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Example:", "parser.parse_args() if parser_args.connection is None or parser_args.directory is None: parser.print_help()", "import crl module, make sure you run this script using", "except Exception as e: self.connection.RollBackTransaction() self.logger.error('Exception on %s' % (file,))", "logging :param stop_on_error: Stop execution on error, default behavior is", "fh = logging.FileHandler(parser_args.log) fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')) logger.addHandler(fh) ch = logging.StreamHandler()", "or e if e.clsException.InnerException is not None and e.clsException.InnerException.InnerException is", "help='displays pending script files with no execution') parser.add_argument('--noexecute-fill', '-nf', action='store_true',", "file = os.path.normpath(os.path.join(root, file)) _file = file if self.case_insensitive: _file", "__version__) return parser if __name__ == '__main__': # parser parser", "'\\033[91m' ENDC = '\\033[0m' BOLD = '\\033[1m' def __init__(self, connection_string,", "\"\"\"Create patch table in database if not exists\"\"\" sql =", "self.case_insensitive: _file = _file.lower() if self.record_files_only: _file = os.path.basename(_file) if", "mssqlvc ~~~~~~~ Database version control utility for Microsoft SQL Server.", "__init__(self, connection_string, patch_dir='.', exclude_pattern=None, logger=None, stop_on_error=False, noexecute=False, case_insensitive=False, record_files_only=False): \"\"\"", "\"\"\" url = urlparse.urlparse(connection_string) is_local_login = not url.username self.connection =", "has been executed\"\"\" now = datetime.datetime.now().strftime('%Y-%m-%d 
%H:%M:%S') if self.record_files_only: file", "all script files from directory\"\"\" _exclude_list = set(exclude_list) if not", "object_id = object_id(\\'_patch_history\\') AND type in (\\'U\\');' exists = database.ExecuteWithResults(sql).Tables[0].Rows.Count", "= self._get_sql_files_from_dir(applied_patches) patches.sort() return patches def execute_file(self, file): \"\"\"Executes file", "fails') parser.add_argument('--exclude-pattern', '-ep', dest='exclude_pattern', help='skips files match to regular expression')", "print('Cannot import crl module, make sure you run this script", "patch(-es)' % (self.database.Name, self.executed_count)) def fill(self): \"\"\"Skip scripts execution but", "None: message += ' ' + e.clsException.InnerException.InnerException.Message self.logger.error('[%s] %s (%s)'", "self.database.ExecuteNonQuery(sql) self.connection.CommitTransaction() except Exception as e: self.connection.RollBackTransaction() self.logger.error('Exception on %s'", "sql = sql_file.read() self.logger.info('[%s] Executing %s...' 
% (self.database.Name, file)) self.connection.BeginTransaction()", "import io import logging import os import re import sys", "io.open(full_name, 'r', encoding='utf8') as sql_file: sql = sql_file.read() self.logger.info('[%s] Executing", "self.logger.debug('Files to execute %s' % (patches,)) for patch in patches:", "full_name, message)) ret = False return ret def put_patch(self, file):", "no execution and fills patch table') parser.add_argument('--stop-on-error', '-soe', action='store_true', dest='stop_on_error',", "action='store_true', dest='debug', default=False, help='enables debug output') parser.add_argument('--version', '-v', action='version', version='%(prog)s", "is None: parser.print_help() exit(1) # logging logger = logging.getLogger('mssql') if", "= 0 self.logger = logging.NullHandler() if not logger else logger", "\"\"\" create table [_patch_history] (id int not null identity(1, 1),", "def __init__(self, connection_string, patch_dir='.', exclude_pattern=None, logger=None, stop_on_error=False, noexecute=False, case_insensitive=False, record_files_only=False):", "file) with io.open(full_name, 'r', encoding='utf8') as sql_file: sql = sql_file.read()", "against database in transaction, returns True if success\"\"\" ret =", "match :param logger: Logger that is used for logging :param", "BOLD = '\\033[1m' def __init__(self, connection_string, patch_dir='.', exclude_pattern=None, logger=None, stop_on_error=False,", "Server. See README.md for more information. 
Licensed under the BSD", "self.record_files_only: file = os.path.basename(file) sql = 'insert [_patch_history] (name, applied_at)", "now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') if self.record_files_only: file = os.path.basename(file) sql", "'-ci', action='store_true', dest='case_insensitive', default=False, help='use case insensitive to compare patch", "file): \"\"\"Executes file against database in transaction, returns True if", "file): \"\"\"Write record that file has been executed\"\"\" now =", "clr except ImportError: print('Cannot import crl module, make sure you", "database objects. :param connection_string: Connection string in rfc1738 url format", "0 if not exists: sql = \"\"\" create table [_patch_history]", "stop_on_error=parser_args.stop_on_error, case_insensitive=parser_args.case_insensitive, record_files_only=parser_args.record_files_only, logger=logger) if parser_args.noexecute: for patch in sqlvc.get_pending_patches():", "output') parser.add_argument('--version', '-v', action='version', version='%(prog)s ' + __version__) return parser", "objects. 
:param connection_string: Connection string in rfc1738 url format :param", "(self.database.Name, file)) self.connection.BeginTransaction() self.database.ExecuteNonQuery(sql) self.connection.CommitTransaction() except Exception as e: self.connection.RollBackTransaction()", "and \"patchname.sql\" is the same') parser.add_argument('--debug', action='store_true', dest='debug', default=False, help='enables", "_patch_history table' % (self.database.Name,)) def __del__(self): if self.server: self.server.ConnectionContext.Disconnect() def", "fills patch table') parser.add_argument('--stop-on-error', '-soe', action='store_true', dest='stop_on_error', default=False, help='stops execution", "object\"\"\" parser = argparse.ArgumentParser( description='MSSQL database patch history tool', formatter_class=argparse.RawDescriptionHelpFormatter,", "dest='log', action='store', help='log file') parser.add_argument('--noexecute', '-n', action='store_true', dest='noexecute', default=False, help='displays", "behavior is to continue :param case_insensitive: Use case insensitive to", "_file.lower().endswith('.sql') or (self.exclude_pattern and re.search(self.exclude_pattern, file))): continue sql_files.append(file) os.chdir(prevdir) return", "Saritasa' __license__ = 'BSD' __version__ = '1.4.5' __all__ = ['MsSqlVersion']", "= True try: full_name = os.path.join(os.path.normpath(self.patch_dir), file) with io.open(full_name, 'r',", "database.ExecuteNonQuery(sql) return exists def get_cmd_line_parser(): \"\"\"Get initialized argparse.ArgumentParser object\"\"\" parser", "else logging.INFO) logger.addHandler(ch) # database handle sqlvc = MsSqlVersion(parser_args.connection, parser_args.directory,", "action='store_true', dest='noexecute', default=False, help='displays pending script files with no execution')", "debug output') parser.add_argument('--version', '-v', action='version', version='%(prog)s ' + __version__) return", "+= 1 self.put_patch(patch) if not success and self.stop_on_error: 
self.logger.critical(MsSqlVersion.bcolors.WARNING +", "connection string, it should contain mssql word') exists = self._create_patch_table_if_not_exists(self.database)", "will be stored to patch table without folder paths \"\"\"", "formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Example: %(prog)s -c \"mssql://sa:123@host\\instance/database\" -d \"D:/1/project/patch\"''') parser.add_argument('--connection', '-c', required=True,", "self.logger.info('[%s] Executed %d patch(-es)' % (self.database.Name, self.executed_count)) def fill(self): \"\"\"Skip", "execution but add them to patches table\"\"\" patches = self.get_pending_patches()", "from [_patch_history];').Tables[0].Rows return set([row['name'] for row in rows]) def _get_sql_files_from_dir(self,", "and try again.' + MsSqlVersion.bcolors.ENDC) raise ScriptExecutionError() self.logger.info('[%s] Executed %d", "expression') parser.add_argument('--record-files-only', '-rfo', action='store_true', dest='record_files_only', default=False, help='only file names will", "SQL Server. See README.md for more information. 
Licensed under the", "DatabaseName=url.path.replace('/', '')) if not is_local_login: self.connection.Login = url.username self.connection.Password =", "self.executed_count = 0 self.logger = logging.NullHandler() if not logger else", "table\"\"\" patches = self.get_pending_patches() for patch in patches: self.logger.info('Add file", "if not exists: sql = \"\"\" create table [_patch_history] (id", "# database handle sqlvc = MsSqlVersion(parser_args.connection, parser_args.directory, exclude_pattern=parser_args.exclude_pattern, stop_on_error=parser_args.stop_on_error, case_insensitive=parser_args.case_insensitive,", "parser.add_argument('--debug', action='store_true', dest='debug', default=False, help='enables debug output') parser.add_argument('--version', '-v', action='version',", "exit(1) # logging logger = logging.getLogger('mssql') if parser_args.log: fh =", "string, it should contain mssql word') exists = self._create_patch_table_if_not_exists(self.database) if", "rows]) def _get_sql_files_from_dir(self, exclude_list=[]): \"\"\"Get all script files from directory\"\"\"", "Executing %s...' 
% (self.database.Name, file)) self.connection.BeginTransaction() self.database.ExecuteNonQuery(sql) self.connection.CommitTransaction() except Exception", "* from sys.objects where object_id = object_id(\\'_patch_history\\') AND type in", "import sys import urlparse try: import clr except ImportError: print('Cannot", "success\"\"\" ret = True try: full_name = os.path.join(os.path.normpath(self.patch_dir), file) with", "Use case insensitive to compare patch files :param record_files_only: Only", "not null); alter table [_patch_history] add constraint _patch_history_PK primary key", "'')) if not is_local_login: self.connection.Login = url.username self.connection.Password = <PASSWORD>", "FAIL = '\\033[91m' ENDC = '\\033[0m' BOLD = '\\033[1m' def", "def fill(self): \"\"\"Skip scripts execution but add them to patches", "not exists\"\"\" sql = 'select * from sys.objects where object_id", "should match :param logger: Logger that is used for logging", "if __name__ == '__main__': # parser parser = get_cmd_line_parser() parser_args", "self.record_files_only: applied_patches = [os.path.basename(f) for f in applied_patches] patches =", "not null, applied_at datetime not null); alter table [_patch_history] add", "datetime not null); alter table [_patch_history] add constraint _patch_history_PK primary", "\"\"\"Executes file against database in transaction, returns True if success\"\"\"", "_create_patch_table_if_not_exists(database): \"\"\"Create patch table in database if not exists\"\"\" sql", "= 'select * from sys.objects where object_id = object_id(\\'_patch_history\\') AND", "action='store_true', dest='record_files_only', default=False, help='only file names will be stored to", "table without folder paths') parser.add_argument('--case-insensitive', '-ci', action='store_true', dest='case_insensitive', default=False, help='use", "with regular expression the patch files should match :param logger:", "= ['MsSqlVersion'] class ScriptExecutionError(Exception): pass class 
MsSqlVersion(object): \"\"\" SQL Server", "files') parser.add_argument('--log', '-l', dest='log', action='store', help='log file') parser.add_argument('--noexecute', '-n', action='store_true',", "Server patch migration class. \"\"\" class bcolors: OKBLUE = '\\033[94m'", "if not os.path.exists(patch_dir): raise Exception('Patch folder does not exist') if", "os.path.basename(file) sql = 'insert [_patch_history] (name, applied_at) values(\\'%s\\', \\'%s\\');' %", "String with regular expression the patch files should match :param", "'-v', action='version', version='%(prog)s ' + __version__) return parser if __name__", "stop_on_error=False, noexecute=False, case_insensitive=False, record_files_only=False): \"\"\" Initialize instance with connection and", "run this script using IronPython') exit(2) import System clr.AddReference('Microsoft.SqlServer.Smo') clr.AddReference('Microsoft.SqlServer.SqlEnum')", "class MsSqlVersion(object): \"\"\" SQL Server patch migration class. \"\"\" class", "record that file has been executed\"\"\" now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')", "to compare patch files :param record_files_only: Only file names will", "is used for logging :param stop_on_error: Stop execution on error,", "OKBLUE = '\\033[94m' OKGREEN = '\\033[92m' WARNING = '\\033[93m' FAIL", "os.path.join(os.path.normpath(self.patch_dir), file) with io.open(full_name, 'r', encoding='utf8') as sql_file: sql =", "parser.add_argument('--case-insensitive', '-ci', action='store_true', dest='case_insensitive', default=False, help='use case insensitive to compare", "not exist') if 'mssql' not in connection_string: raise Exception('Wrong connection", "help='displays pending script files with no execution and fills patch", "directory\"\"\" _exclude_list = set(exclude_list) if not self.case_insensitive else [f.lower() for", "Common __author__ = '<NAME>' __copyright__ = 'Copyright (c) 2015-2016, Saritasa'", "tool', formatter_class=argparse.RawDescriptionHelpFormatter, 
epilog='''Example: %(prog)s -c \"mssql://sa:123@host\\instance/database\" -d \"D:/1/project/patch\"''') parser.add_argument('--connection', '-c',", "self.case_insensitive = case_insensitive self.record_files_only = record_files_only self.executed_count = 0 self.logger", "folder paths') parser.add_argument('--case-insensitive', '-ci', action='store_true', dest='case_insensitive', default=False, help='use case insensitive", "(file,)) message = e.message or e if e.clsException.InnerException is not", "= 90 self.exclude_pattern = exclude_pattern self.patch_dir = patch_dir self.stop_on_error =", "values(\\'%s\\', \\'%s\\');' % (file, now) self.database.ExecuteNonQuery(sql) def get_applied_patches(self): rows =", "import System clr.AddReference('Microsoft.SqlServer.Smo') clr.AddReference('Microsoft.SqlServer.SqlEnum') clr.AddReference('Microsoft.SqlServer.ConnectionInfo') import Microsoft.SqlServer.Management.Smo as Smo import", "control utility for Microsoft SQL Server. See README.md for more", "self.exclude_pattern = exclude_pattern self.patch_dir = patch_dir self.stop_on_error = stop_on_error self.case_insensitive", "table' % (self.database.Name,)) def __del__(self): if self.server: self.server.ConnectionContext.Disconnect() def update(self):", "AND type in (\\'U\\');' exists = database.ExecuteWithResults(sql).Tables[0].Rows.Count > 0 if", "dest='connection', action='store', help='connection string in rfc1738 url format, required') parser.add_argument('--directory',", "\"\"\" import argparse import datetime import io import logging import", "e if e.clsException.InnerException is not None and e.clsException.InnerException.InnerException is not", "url format :param patch_dir: Patch directory with .sql files :param", "def get_applied_patches(self): rows = self.database.ExecuteWithResults('select name from [_patch_history];').Tables[0].Rows return set([row['name']", "parser = get_cmd_line_parser() parser_args = parser.parse_args() if parser_args.connection is None", 
"ImportError: print('Cannot import crl module, make sure you run this", "and re.search(self.exclude_pattern, file))): continue sql_files.append(file) os.chdir(prevdir) return sql_files @staticmethod def", "Licensed under the BSD license. See LICENSE file in the", "required=True, dest='connection', action='store', help='connection string in rfc1738 url format, required')", "file))): continue sql_files.append(file) os.chdir(prevdir) return sql_files @staticmethod def _create_patch_table_if_not_exists(database): \"\"\"Create", "message)) ret = False return ret def put_patch(self, file): \"\"\"Write", "self.patch_dir = patch_dir self.stop_on_error = stop_on_error self.case_insensitive = case_insensitive self.record_files_only", "datetime import io import logging import os import re import", "__copyright__ = 'Copyright (c) 2015-2016, Saritasa' __license__ = 'BSD' __version__", "and self.stop_on_error: self.logger.critical(MsSqlVersion.bcolors.WARNING + 'Execution stopped. Please fix errors and", "import Microsoft.SqlServer.Management.Common as Common __author__ = '<NAME>' __copyright__ = 'Copyright", "patches: success = self.execute_file(patch) if success: self.executed_count += 1 self.put_patch(patch)", "create table [_patch_history] (id int not null identity(1, 1), name", "= parser.parse_args() if parser_args.connection is None or parser_args.directory is None:", "if not is_local_login: self.connection.Login = url.username self.connection.Password = <PASSWORD> self.server", "patches: self.logger.info('Add file %s' % (patch,)) self.put_patch(patch) def get_pending_patches(self): applied_patches", "prevdir = os.getcwd() os.chdir(self.patch_dir) sql_files = [] for root, dirs,", "compare patch files so \"PatchName.sql\" and \"patchname.sql\" is the same')", "update(self): \"\"\"Executes database update process\"\"\" patches = self.get_pending_patches() self.logger.debug('Files to", "for Microsoft SQL Server. See README.md for more information. 
Licensed", "= os.path.basename(_file) if (_file in _exclude_list or not _file.lower().endswith('.sql') or", "(self.database.Name,)) def __del__(self): if self.server: self.server.ConnectionContext.Disconnect() def update(self): \"\"\"Executes database", "not exists: sql = \"\"\" create table [_patch_history] (id int", "parser if __name__ == '__main__': # parser parser = get_cmd_line_parser()", "See README.md for more information. Licensed under the BSD license.", "logger: Logger that is used for logging :param stop_on_error: Stop", "sql_file: sql = sql_file.read() self.logger.info('[%s] Executing %s...' % (self.database.Name, file))", "patch_dir='.', exclude_pattern=None, logger=None, stop_on_error=False, noexecute=False, case_insensitive=False, record_files_only=False): \"\"\" Initialize instance", "-c \"mssql://sa:123@host\\instance/database\" -d \"D:/1/project/patch\"''') parser.add_argument('--connection', '-c', required=True, dest='connection', action='store', help='connection", "in rfc1738 url format, required') parser.add_argument('--directory', '-d', dest='directory', action='store', default='.',", "you run this script using IronPython') exit(2) import System clr.AddReference('Microsoft.SqlServer.Smo')", "with no execution and fills patch table') parser.add_argument('--stop-on-error', '-soe', action='store_true',", "(%s)' % (self.database.Name, full_name, message)) ret = False return ret", "script files with no execution') parser.add_argument('--noexecute-fill', '-nf', action='store_true', dest='noexecute_fill', default=False,", "and e.clsException.InnerException.InnerException is not None: message += ' ' +", "self._create_patch_table_if_not_exists(self.database) if not exists: self.logger.info('[%s] created _patch_history table' % (self.database.Name,))", "sqlvc = MsSqlVersion(parser_args.connection, parser_args.directory, exclude_pattern=parser_args.exclude_pattern, stop_on_error=parser_args.stop_on_error, case_insensitive=parser_args.case_insensitive, 
record_files_only=parser_args.record_files_only, logger=logger) if", "full license information. \"\"\" import argparse import datetime import io", "if self.record_files_only: _file = os.path.basename(_file) if (_file in _exclude_list or", "self.server.ConnectionContext.Disconnect() def update(self): \"\"\"Executes database update process\"\"\" patches = self.get_pending_patches()", "utility for Microsoft SQL Server. See README.md for more information.", "urlparse try: import clr except ImportError: print('Cannot import crl module,", "in os.walk('.'): for file in files: file = os.path.normpath(os.path.join(root, file))", "__version__ = '1.4.5' __all__ = ['MsSqlVersion'] class ScriptExecutionError(Exception): pass class", "'Execution stopped. Please fix errors and try again.' + MsSqlVersion.bcolors.ENDC)", "continue sql_files.append(file) os.chdir(prevdir) return sql_files @staticmethod def _create_patch_table_if_not_exists(database): \"\"\"Create patch", "execution if any script fails') parser.add_argument('--exclude-pattern', '-ep', dest='exclude_pattern', help='skips files", "%s...' 
% (self.database.Name, file)) self.connection.BeginTransaction() self.database.ExecuteNonQuery(sql) self.connection.CommitTransaction() except Exception as", "url.username self.connection = Common.ServerConnection(LoginSecure=is_local_login, ServerInstance=url.hostname, DatabaseName=url.path.replace('/', '')) if not is_local_login:", "parser.add_argument('--stop-on-error', '-soe', action='store_true', dest='stop_on_error', default=False, help='stops execution if any script", "parser.add_argument('--connection', '-c', required=True, dest='connection', action='store', help='connection string in rfc1738 url", "url.username self.connection.Password = <PASSWORD> self.server = Smo.Server(self.connection) self.database = self.server.Databases[self.connection.DatabaseName]", "def __del__(self): if self.server: self.server.ConnectionContext.Disconnect() def update(self): \"\"\"Executes database update", "in exclude_list] prevdir = os.getcwd() os.chdir(self.patch_dir) sql_files = [] for", "on error, default behavior is to continue :param case_insensitive: Use", "\"\"\"Executes database update process\"\"\" patches = self.get_pending_patches() self.logger.debug('Files to execute", "add constraint _patch_history_PK primary key clustered (id); \"\"\" database.ExecuteNonQuery(sql) return", "patch files so \"PatchName.sql\" and \"patchname.sql\" is the same') parser.add_argument('--debug',", "= os.path.normpath(os.path.join(root, file)) _file = file if self.case_insensitive: _file =", "% (self.database.Name,)) def __del__(self): if self.server: self.server.ConnectionContext.Disconnect() def update(self): \"\"\"Executes", "__del__(self): if self.server: self.server.ConnectionContext.Disconnect() def update(self): \"\"\"Executes database update process\"\"\"", "exists\"\"\" sql = 'select * from sys.objects where object_id =", "help='use case insensitive to compare patch files so \"PatchName.sql\" and", "sys import urlparse try: import clr except ImportError: print('Cannot import", 
"(\\'U\\');' exists = database.ExecuteWithResults(sql).Tables[0].Rows.Count > 0 if not exists: sql", "\"PatchName.sql\" and \"patchname.sql\" is the same') parser.add_argument('--debug', action='store_true', dest='debug', default=False,", "_patch_history_PK primary key clustered (id); \"\"\" database.ExecuteNonQuery(sql) return exists def", "self.get_pending_patches() for patch in patches: self.logger.info('Add file %s' % (patch,))", "for row in rows]) def _get_sql_files_from_dir(self, exclude_list=[]): \"\"\"Get all script", "re import sys import urlparse try: import clr except ImportError:", "None or parser_args.directory is None: parser.print_help() exit(1) # logging logger", "regular expression') parser.add_argument('--record-files-only', '-rfo', action='store_true', dest='record_files_only', default=False, help='only file names", "file = os.path.basename(file) sql = 'insert [_patch_history] (name, applied_at) values(\\'%s\\',", "~~~~~~~ Database version control utility for Microsoft SQL Server. See", "exists def get_cmd_line_parser(): \"\"\"Get initialized argparse.ArgumentParser object\"\"\" parser = argparse.ArgumentParser(", "full_name = os.path.join(os.path.normpath(self.patch_dir), file) with io.open(full_name, 'r', encoding='utf8') as sql_file:", "sql = 'insert [_patch_history] (name, applied_at) values(\\'%s\\', \\'%s\\');' % (file,", "action='store_true', dest='case_insensitive', default=False, help='use case insensitive to compare patch files", "self.stop_on_error: self.logger.critical(MsSqlVersion.bcolors.WARNING + 'Execution stopped. 
Please fix errors and try", "logging.FileHandler(parser_args.log) fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')) logger.addHandler(fh) ch = logging.StreamHandler() ch.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s]", "that file has been executed\"\"\" now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') if", "+ MsSqlVersion.bcolors.ENDC) raise ScriptExecutionError() self.logger.info('[%s] Executed %d patch(-es)' % (self.database.Name,", "not exists: self.logger.info('[%s] created _patch_history table' % (self.database.Name,)) def __del__(self):", "same') parser.add_argument('--debug', action='store_true', dest='debug', default=False, help='enables debug output') parser.add_argument('--version', '-v',", "for f in applied_patches] patches = self._get_sql_files_from_dir(applied_patches) patches.sort() return patches", "to continue :param case_insensitive: Use case insensitive to compare patch", "'-n', action='store_true', dest='noexecute', default=False, help='displays pending script files with no", "'insert [_patch_history] (name, applied_at) values(\\'%s\\', \\'%s\\');' % (file, now) self.database.ExecuteNonQuery(sql)", "-d \"D:/1/project/patch\"''') parser.add_argument('--connection', '-c', required=True, dest='connection', action='store', help='connection string in", "files match to regular expression') parser.add_argument('--record-files-only', '-rfo', action='store_true', dest='record_files_only', default=False,", "\"\"\"Get initialized argparse.ArgumentParser object\"\"\" parser = argparse.ArgumentParser( description='MSSQL database patch", "project root for full license information. 
\"\"\" import argparse import", "get_pending_patches(self): applied_patches = self.get_applied_patches() if self.record_files_only: applied_patches = [os.path.basename(f) for", "file if self.case_insensitive: _file = _file.lower() if self.record_files_only: _file =", "help='directory with patch files') parser.add_argument('--log', '-l', dest='log', action='store', help='log file')", "applied_at) values(\\'%s\\', \\'%s\\');' % (file, now) self.database.ExecuteNonQuery(sql) def get_applied_patches(self): rows", "execution on error, default behavior is to continue :param case_insensitive:", "exclude_pattern=parser_args.exclude_pattern, stop_on_error=parser_args.stop_on_error, case_insensitive=parser_args.case_insensitive, record_files_only=parser_args.record_files_only, logger=logger) if parser_args.noexecute: for patch in", "else logger if not os.path.exists(patch_dir): raise Exception('Patch folder does not", "without folder paths \"\"\" url = urlparse.urlparse(connection_string) is_local_login = not", "2015-2016, Saritasa' __license__ = 'BSD' __version__ = '1.4.5' __all__ =", "patch_dir self.stop_on_error = stop_on_error self.case_insensitive = case_insensitive self.record_files_only = record_files_only", "'r', encoding='utf8') as sql_file: sql = sql_file.read() self.logger.info('[%s] Executing %s...'", "alter table [_patch_history] add constraint _patch_history_PK primary key clustered (id);", "if parser_args.connection is None or parser_args.directory is None: parser.print_help() exit(1)", "dest='noexecute_fill', default=False, help='displays pending script files with no execution and", "crl module, make sure you run this script using IronPython')", "logger.addHandler(ch) # database handle sqlvc = MsSqlVersion(parser_args.connection, parser_args.directory, exclude_pattern=parser_args.exclude_pattern, stop_on_error=parser_args.stop_on_error,", "\"\"\" SQL Server patch migration class. 
\"\"\" class bcolors: OKBLUE", "<reponame>Saritasa/mssqlvc<filename>mssqlvc.py # -*- coding: utf-8 -*- \"\"\" mssqlvc ~~~~~~~ Database", "files in os.walk('.'): for file in files: file = os.path.normpath(os.path.join(root,", "if parser_args.noexecute: for patch in sqlvc.get_pending_patches(): logger.info(' ' + patch)", "as Smo import Microsoft.SqlServer.Management.Common as Common __author__ = '<NAME>' __copyright__", "not in connection_string: raise Exception('Wrong connection string, it should contain", "file)) _file = file if self.case_insensitive: _file = _file.lower() if", ":param exclude_pattern: String with regular expression the patch files should", "self.logger = logging.NullHandler() if not logger else logger if not", "Microsoft SQL Server. See README.md for more information. Licensed under", "parser_args.connection is None or parser_args.directory is None: parser.print_help() exit(1) #", "object_id(\\'_patch_history\\') AND type in (\\'U\\');' exists = database.ExecuteWithResults(sql).Tables[0].Rows.Count > 0", "patch migration class. \"\"\" class bcolors: OKBLUE = '\\033[94m' OKGREEN", "_get_sql_files_from_dir(self, exclude_list=[]): \"\"\"Get all script files from directory\"\"\" _exclude_list =", "parser.add_argument('--exclude-pattern', '-ep', dest='exclude_pattern', help='skips files match to regular expression') parser.add_argument('--record-files-only',", "action='version', version='%(prog)s ' + __version__) return parser if __name__ ==", "again.' 
+ MsSqlVersion.bcolors.ENDC) raise ScriptExecutionError() self.logger.info('[%s] Executed %d patch(-es)' %", "__all__ = ['MsSqlVersion'] class ScriptExecutionError(Exception): pass class MsSqlVersion(object): \"\"\" SQL", "table') parser.add_argument('--stop-on-error', '-soe', action='store_true', dest='stop_on_error', default=False, help='stops execution if any", "return parser if __name__ == '__main__': # parser parser =", "LICENSE file in the project root for full license information.", "sql_files.append(file) os.chdir(prevdir) return sql_files @staticmethod def _create_patch_table_if_not_exists(database): \"\"\"Create patch table", "returns True if success\"\"\" ret = True try: full_name =", "and database objects. :param connection_string: Connection string in rfc1738 url", "file)) self.connection.BeginTransaction() self.database.ExecuteNonQuery(sql) self.connection.CommitTransaction() except Exception as e: self.connection.RollBackTransaction() self.logger.error('Exception", "in the project root for full license information. 
\"\"\" import", "Microsoft.SqlServer.Management.Common as Common __author__ = '<NAME>' __copyright__ = 'Copyright (c)", ":param record_files_only: Only file names will be stored to patch", "self.database = self.server.Databases[self.connection.DatabaseName] self.server.ConnectionContext.ConnectTimeout = 90 self.exclude_pattern = exclude_pattern self.patch_dir", "self.database.ExecuteNonQuery(sql) def get_applied_patches(self): rows = self.database.ExecuteWithResults('select name from [_patch_history];').Tables[0].Rows return", "\"mssql://sa:123@host\\instance/database\" -d \"D:/1/project/patch\"''') parser.add_argument('--connection', '-c', required=True, dest='connection', action='store', help='connection string", "__name__ == '__main__': # parser parser = get_cmd_line_parser() parser_args =", "case insensitive to compare patch files so \"PatchName.sql\" and \"patchname.sql\"", "def put_patch(self, file): \"\"\"Write record that file has been executed\"\"\"", "None and e.clsException.InnerException.InnerException is not None: message += ' '", "names will be stored to patch table without folder paths", "parser parser = get_cmd_line_parser() parser_args = parser.parse_args() if parser_args.connection is", "if parser_args.log: fh = logging.FileHandler(parser_args.log) fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')) logger.addHandler(fh) ch", "with io.open(full_name, 'r', encoding='utf8') as sql_file: sql = sql_file.read() self.logger.info('[%s]", "it should contain mssql word') exists = self._create_patch_table_if_not_exists(self.database) if not", "= e.message or e if e.clsException.InnerException is not None and", "(_file in _exclude_list or not _file.lower().endswith('.sql') or (self.exclude_pattern and re.search(self.exclude_pattern,", "int not null identity(1, 1), name varchar(100) not null, applied_at", "in patches: self.logger.info('Add file %s' % (patch,)) self.put_patch(patch) def get_pending_patches(self):", "be stored to patch 
table without folder paths') parser.add_argument('--case-insensitive', '-ci',", "self.connection.CommitTransaction() except Exception as e: self.connection.RollBackTransaction() self.logger.error('Exception on %s' %", "version='%(prog)s ' + __version__) return parser if __name__ == '__main__':", "patches.sort() return patches def execute_file(self, file): \"\"\"Executes file against database", "applied_at datetime not null); alter table [_patch_history] add constraint _patch_history_PK", "stop_on_error: Stop execution on error, default behavior is to continue", "for full license information. \"\"\" import argparse import datetime import", "self.execute_file(patch) if success: self.executed_count += 1 self.put_patch(patch) if not success", "file names will be stored to patch table without folder", "e.clsException.InnerException.InnerException.Message self.logger.error('[%s] %s (%s)' % (self.database.Name, full_name, message)) ret =", "key clustered (id); \"\"\" database.ExecuteNonQuery(sql) return exists def get_cmd_line_parser(): \"\"\"Get", "description='MSSQL database patch history tool', formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Example: %(prog)s -c \"mssql://sa:123@host\\instance/database\"", "raise Exception('Wrong connection string, it should contain mssql word') exists", "for patch in patches: success = self.execute_file(patch) if success: self.executed_count", "\"\"\" mssqlvc ~~~~~~~ Database version control utility for Microsoft SQL", "names will be stored to patch table without folder paths')", "% (patch,)) self.put_patch(patch) def get_pending_patches(self): applied_patches = self.get_applied_patches() if self.record_files_only:", "= logging.FileHandler(parser_args.log) fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')) logger.addHandler(fh) ch = logging.StreamHandler() ch.setFormatter(logging.Formatter('%(asctime)s", "exclude_pattern=None, logger=None, stop_on_error=False, noexecute=False, 
case_insensitive=False, record_files_only=False): \"\"\" Initialize instance with", "dest='exclude_pattern', help='skips files match to regular expression') parser.add_argument('--record-files-only', '-rfo', action='store_true',", "os.path.normpath(os.path.join(root, file)) _file = file if self.case_insensitive: _file = _file.lower()", "not logger else logger if not os.path.exists(patch_dir): raise Exception('Patch folder", "help='log file') parser.add_argument('--noexecute', '-n', action='store_true', dest='noexecute', default=False, help='displays pending script", "utf-8 -*- \"\"\" mssqlvc ~~~~~~~ Database version control utility for", "dest='directory', action='store', default='.', help='directory with patch files') parser.add_argument('--log', '-l', dest='log',", "or not _file.lower().endswith('.sql') or (self.exclude_pattern and re.search(self.exclude_pattern, file))): continue sql_files.append(file)", "exist') if 'mssql' not in connection_string: raise Exception('Wrong connection string,", "them to patches table\"\"\" patches = self.get_pending_patches() for patch in", "clustered (id); \"\"\" database.ExecuteNonQuery(sql) return exists def get_cmd_line_parser(): \"\"\"Get initialized", "in transaction, returns True if success\"\"\" ret = True try:", "self.put_patch(patch) if not success and self.stop_on_error: self.logger.critical(MsSqlVersion.bcolors.WARNING + 'Execution stopped.", "= False return ret def put_patch(self, file): \"\"\"Write record that", "database in transaction, returns True if success\"\"\" ret = True", "= MsSqlVersion(parser_args.connection, parser_args.directory, exclude_pattern=parser_args.exclude_pattern, stop_on_error=parser_args.stop_on_error, case_insensitive=parser_args.case_insensitive, record_files_only=parser_args.record_files_only, logger=logger) if parser_args.noexecute:", "files so \"PatchName.sql\" and \"patchname.sql\" is the same') parser.add_argument('--debug', action='store_true',", "logging.NullHandler() if not logger 
else logger if not os.path.exists(patch_dir): raise", "case insensitive to compare patch files :param record_files_only: Only file", "os.chdir(self.patch_dir) sql_files = [] for root, dirs, files in os.walk('.'):", "patch table') parser.add_argument('--stop-on-error', '-soe', action='store_true', dest='stop_on_error', default=False, help='stops execution if", "null); alter table [_patch_history] add constraint _patch_history_PK primary key clustered", "any script fails') parser.add_argument('--exclude-pattern', '-ep', dest='exclude_pattern', help='skips files match to", "% (self.database.Name, self.executed_count)) def fill(self): \"\"\"Skip scripts execution but add", "default=False, help='enables debug output') parser.add_argument('--version', '-v', action='version', version='%(prog)s ' +", "clr.AddReference('Microsoft.SqlServer.ConnectionInfo') import Microsoft.SqlServer.Management.Smo as Smo import Microsoft.SqlServer.Management.Common as Common __author__", "will be stored to patch table without folder paths') parser.add_argument('--case-insensitive',", "%(prog)s -c \"mssql://sa:123@host\\instance/database\" -d \"D:/1/project/patch\"''') parser.add_argument('--connection', '-c', required=True, dest='connection', action='store',", "match to regular expression') parser.add_argument('--record-files-only', '-rfo', action='store_true', dest='record_files_only', default=False, help='only", "in applied_patches] patches = self._get_sql_files_from_dir(applied_patches) patches.sort() return patches def execute_file(self,", "database patch history tool', formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Example: %(prog)s -c \"mssql://sa:123@host\\instance/database\" -d", "file %s' % (patch,)) self.put_patch(patch) def get_pending_patches(self): applied_patches = self.get_applied_patches()", "= stop_on_error self.case_insensitive = case_insensitive self.record_files_only = record_files_only self.executed_count =", "'select * from sys.objects where object_id 
= object_id(\\'_patch_history\\') AND type", "patches = self.get_pending_patches() self.logger.debug('Files to execute %s' % (patches,)) for", "files from directory\"\"\" _exclude_list = set(exclude_list) if not self.case_insensitive else", "% (file, now) self.database.ExecuteNonQuery(sql) def get_applied_patches(self): rows = self.database.ExecuteWithResults('select name", ":param case_insensitive: Use case insensitive to compare patch files :param", "connection_string: raise Exception('Wrong connection string, it should contain mssql word')", "self.connection.Password = <PASSWORD> self.server = Smo.Server(self.connection) self.database = self.server.Databases[self.connection.DatabaseName] self.server.ConnectionContext.ConnectTimeout", "dest='record_files_only', default=False, help='only file names will be stored to patch", "= 'BSD' __version__ = '1.4.5' __all__ = ['MsSqlVersion'] class ScriptExecutionError(Exception):", "file in the project root for full license information. \"\"\"", "applied_patches] patches = self._get_sql_files_from_dir(applied_patches) patches.sort() return patches def execute_file(self, file):", "90 self.exclude_pattern = exclude_pattern self.patch_dir = patch_dir self.stop_on_error = stop_on_error", "\\'%s\\');' % (file, now) self.database.ExecuteNonQuery(sql) def get_applied_patches(self): rows = self.database.ExecuteWithResults('select", "more information. Licensed under the BSD license. 
See LICENSE file", "dest='debug', default=False, help='enables debug output') parser.add_argument('--version', '-v', action='version', version='%(prog)s '", "if success\"\"\" ret = True try: full_name = os.path.join(os.path.normpath(self.patch_dir), file)", "to execute %s' % (patches,)) for patch in patches: success", "with .sql files :param exclude_pattern: String with regular expression the", "except ImportError: print('Cannot import crl module, make sure you run", "= 'insert [_patch_history] (name, applied_at) values(\\'%s\\', \\'%s\\');' % (file, now)", "if self.case_insensitive: _file = _file.lower() if self.record_files_only: _file = os.path.basename(_file)", "noexecute=False, case_insensitive=False, record_files_only=False): \"\"\" Initialize instance with connection and database", "regular expression the patch files should match :param logger: Logger", "to regular expression') parser.add_argument('--record-files-only', '-rfo', action='store_true', dest='record_files_only', default=False, help='only file", "= os.getcwd() os.chdir(self.patch_dir) sql_files = [] for root, dirs, files", "= logging.NullHandler() if not logger else logger if not os.path.exists(patch_dir):", "Smo import Microsoft.SqlServer.Management.Common as Common __author__ = '<NAME>' __copyright__ =", "[%(levelname)s] %(message)s')) logger.addHandler(fh) ch = logging.StreamHandler() ch.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')) logger.setLevel(logging.DEBUG", "os.getcwd() os.chdir(self.patch_dir) sql_files = [] for root, dirs, files in", ":param logger: Logger that is used for logging :param stop_on_error:", "%s' % (patches,)) for patch in patches: success = self.execute_file(patch)", "patch files :param record_files_only: Only file names will be stored", "compare patch files :param record_files_only: Only file names will be", "self.record_files_only: _file = os.path.basename(_file) if (_file in _exclude_list or not", "= argparse.ArgumentParser( 
description='MSSQL database patch history tool', formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Example: %(prog)s", "help='stops execution if any script fails') parser.add_argument('--exclude-pattern', '-ep', dest='exclude_pattern', help='skips", "class bcolors: OKBLUE = '\\033[94m' OKGREEN = '\\033[92m' WARNING =", "table without folder paths \"\"\" url = urlparse.urlparse(connection_string) is_local_login =", "not os.path.exists(patch_dir): raise Exception('Patch folder does not exist') if 'mssql'", "applied_patches = [os.path.basename(f) for f in applied_patches] patches = self._get_sql_files_from_dir(applied_patches)", "fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')) logger.addHandler(fh) ch = logging.StreamHandler() ch.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))", "information. Licensed under the BSD license. See LICENSE file in", "Exception('Wrong connection string, it should contain mssql word') exists =", "contain mssql word') exists = self._create_patch_table_if_not_exists(self.database) if not exists: self.logger.info('[%s]", "True if success\"\"\" ret = True try: full_name = os.path.join(os.path.normpath(self.patch_dir),", "license. See LICENSE file in the project root for full", "as sql_file: sql = sql_file.read() self.logger.info('[%s] Executing %s...' % (self.database.Name,", "action='store_true', dest='noexecute_fill', default=False, help='displays pending script files with no execution", "execution and fills patch table') parser.add_argument('--stop-on-error', '-soe', action='store_true', dest='stop_on_error', default=False,", "information. \"\"\" import argparse import datetime import io import logging", "sure you run this script using IronPython') exit(2) import System", "success and self.stop_on_error: self.logger.critical(MsSqlVersion.bcolors.WARNING + 'Execution stopped. 
Please fix errors", "put_patch(self, file): \"\"\"Write record that file has been executed\"\"\" now", "= file if self.case_insensitive: _file = _file.lower() if self.record_files_only: _file", "sql = 'select * from sys.objects where object_id = object_id(\\'_patch_history\\')", "= database.ExecuteWithResults(sql).Tables[0].Rows.Count > 0 if not exists: sql = \"\"\"", "self.logger.info('[%s] Executing %s...' % (self.database.Name, file)) self.connection.BeginTransaction() self.database.ExecuteNonQuery(sql) self.connection.CommitTransaction() except", "patch table without folder paths \"\"\" url = urlparse.urlparse(connection_string) is_local_login", "os.walk('.'): for file in files: file = os.path.normpath(os.path.join(root, file)) _file", "= self.get_applied_patches() if self.record_files_only: applied_patches = [os.path.basename(f) for f in", "or (self.exclude_pattern and re.search(self.exclude_pattern, file))): continue sql_files.append(file) os.chdir(prevdir) return sql_files", "re.search(self.exclude_pattern, file))): continue sql_files.append(file) os.chdir(prevdir) return sql_files @staticmethod def _create_patch_table_if_not_exists(database):", "argparse.ArgumentParser object\"\"\" parser = argparse.ArgumentParser( description='MSSQL database patch history tool',", "self.stop_on_error = stop_on_error self.case_insensitive = case_insensitive self.record_files_only = record_files_only self.executed_count", "for patch in sqlvc.get_pending_patches(): logger.info(' ' + patch) elif parser_args.noexecute_fill:", "exists: self.logger.info('[%s] created _patch_history table' % (self.database.Name,)) def __del__(self): if", "= '\\033[92m' WARNING = '\\033[93m' FAIL = '\\033[91m' ENDC =", "logging.StreamHandler() ch.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')) logger.setLevel(logging.DEBUG if parser_args.debug else logging.INFO) logger.addHandler(ch)", "logging logger = logging.getLogger('mssql') if parser_args.log: fh = 
logging.FileHandler(parser_args.log) fh.setFormatter(logging.Formatter('%(asctime)s", "exclude_pattern self.patch_dir = patch_dir self.stop_on_error = stop_on_error self.case_insensitive = case_insensitive", "with connection and database objects. :param connection_string: Connection string in", "\"\"\"Write record that file has been executed\"\"\" now = datetime.datetime.now().strftime('%Y-%m-%d", "script files with no execution and fills patch table') parser.add_argument('--stop-on-error',", "to patches table\"\"\" patches = self.get_pending_patches() for patch in patches:", "+ e.clsException.InnerException.InnerException.Message self.logger.error('[%s] %s (%s)' % (self.database.Name, full_name, message)) ret", "files :param record_files_only: Only file names will be stored to", "from sys.objects where object_id = object_id(\\'_patch_history\\') AND type in (\\'U\\');'", "'-soe', action='store_true', dest='stop_on_error', default=False, help='stops execution if any script fails')", "Patch directory with .sql files :param exclude_pattern: String with regular", "= Smo.Server(self.connection) self.database = self.server.Databases[self.connection.DatabaseName] self.server.ConnectionContext.ConnectTimeout = 90 self.exclude_pattern =", "= logging.getLogger('mssql') if parser_args.log: fh = logging.FileHandler(parser_args.log) fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))", "parser_args.log: fh = logging.FileHandler(parser_args.log) fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')) logger.addHandler(fh) ch =", "%s' % (file,)) message = e.message or e if e.clsException.InnerException", ".sql files :param exclude_pattern: String with regular expression the patch", "0 self.logger = logging.NullHandler() if not logger else logger if", "patch history tool', formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Example: %(prog)s -c \"mssql://sa:123@host\\instance/database\" -d \"D:/1/project/patch\"''')", 
"database handle sqlvc = MsSqlVersion(parser_args.connection, parser_args.directory, exclude_pattern=parser_args.exclude_pattern, stop_on_error=parser_args.stop_on_error, case_insensitive=parser_args.case_insensitive, record_files_only=parser_args.record_files_only,", "# -*- coding: utf-8 -*- \"\"\" mssqlvc ~~~~~~~ Database version", "parser_args.directory, exclude_pattern=parser_args.exclude_pattern, stop_on_error=parser_args.stop_on_error, case_insensitive=parser_args.case_insensitive, record_files_only=parser_args.record_files_only, logger=logger) if parser_args.noexecute: for patch", "[f.lower() for f in exclude_list] prevdir = os.getcwd() os.chdir(self.patch_dir) sql_files", "class. \"\"\" class bcolors: OKBLUE = '\\033[94m' OKGREEN = '\\033[92m'", "self.server = Smo.Server(self.connection) self.database = self.server.Databases[self.connection.DatabaseName] self.server.ConnectionContext.ConnectTimeout = 90 self.exclude_pattern", "database update process\"\"\" patches = self.get_pending_patches() self.logger.debug('Files to execute %s'", "= _file.lower() if self.record_files_only: _file = os.path.basename(_file) if (_file in", "record_files_only=parser_args.record_files_only, logger=logger) if parser_args.noexecute: for patch in sqlvc.get_pending_patches(): logger.info(' '", "not null identity(1, 1), name varchar(100) not null, applied_at datetime", "coding: utf-8 -*- \"\"\" mssqlvc ~~~~~~~ Database version control utility", "with no execution') parser.add_argument('--noexecute-fill', '-nf', action='store_true', dest='noexecute_fill', default=False, help='displays pending", "self.logger.info('[%s] created _patch_history table' % (self.database.Name,)) def __del__(self): if self.server:", "the patch files should match :param logger: Logger that is", "name from [_patch_history];').Tables[0].Rows return set([row['name'] for row in rows]) def", "using IronPython') exit(2) import System clr.AddReference('Microsoft.SqlServer.Smo') 
clr.AddReference('Microsoft.SqlServer.SqlEnum') clr.AddReference('Microsoft.SqlServer.ConnectionInfo') import Microsoft.SqlServer.Management.Smo", "self.put_patch(patch) def get_pending_patches(self): applied_patches = self.get_applied_patches() if self.record_files_only: applied_patches =", "is not None: message += ' ' + e.clsException.InnerException.InnerException.Message self.logger.error('[%s]", "if success: self.executed_count += 1 self.put_patch(patch) if not success and", "parser.add_argument('--noexecute-fill', '-nf', action='store_true', dest='noexecute_fill', default=False, help='displays pending script files with", "\"\"\" Initialize instance with connection and database objects. :param connection_string:", "return exists def get_cmd_line_parser(): \"\"\"Get initialized argparse.ArgumentParser object\"\"\" parser =", "not None: message += ' ' + e.clsException.InnerException.InnerException.Message self.logger.error('[%s] %s", "file has been executed\"\"\" now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') if self.record_files_only:", "action='store', help='log file') parser.add_argument('--noexecute', '-n', action='store_true', dest='noexecute', default=False, help='displays pending", "help='only file names will be stored to patch table without", "now) self.database.ExecuteNonQuery(sql) def get_applied_patches(self): rows = self.database.ExecuteWithResults('select name from [_patch_history];').Tables[0].Rows", "in (\\'U\\');' exists = database.ExecuteWithResults(sql).Tables[0].Rows.Count > 0 if not exists:", "= self._create_patch_table_if_not_exists(self.database) if not exists: self.logger.info('[%s] created _patch_history table' %", "Stop execution on error, default behavior is to continue :param", "# parser parser = get_cmd_line_parser() parser_args = parser.parse_args() if parser_args.connection", "% (self.database.Name, file)) self.connection.BeginTransaction() self.database.ExecuteNonQuery(sql) self.connection.CommitTransaction() except 
Exception as e:", "= self.execute_file(patch) if success: self.executed_count += 1 self.put_patch(patch) if not", "type in (\\'U\\');' exists = database.ExecuteWithResults(sql).Tables[0].Rows.Count > 0 if not", "license information. \"\"\" import argparse import datetime import io import", "clr.AddReference('Microsoft.SqlServer.Smo') clr.AddReference('Microsoft.SqlServer.SqlEnum') clr.AddReference('Microsoft.SqlServer.ConnectionInfo') import Microsoft.SqlServer.Management.Smo as Smo import Microsoft.SqlServer.Management.Common as", "return set([row['name'] for row in rows]) def _get_sql_files_from_dir(self, exclude_list=[]): \"\"\"Get", "'BSD' __version__ = '1.4.5' __all__ = ['MsSqlVersion'] class ScriptExecutionError(Exception): pass", "Only file names will be stored to patch table without", "Common.ServerConnection(LoginSecure=is_local_login, ServerInstance=url.hostname, DatabaseName=url.path.replace('/', '')) if not is_local_login: self.connection.Login = url.username", "IronPython') exit(2) import System clr.AddReference('Microsoft.SqlServer.Smo') clr.AddReference('Microsoft.SqlServer.SqlEnum') clr.AddReference('Microsoft.SqlServer.ConnectionInfo') import Microsoft.SqlServer.Management.Smo as", "if e.clsException.InnerException is not None and e.clsException.InnerException.InnerException is not None:", "for root, dirs, files in os.walk('.'): for file in files:", "the project root for full license information. 
\"\"\" import argparse", "= url.username self.connection.Password = <PASSWORD> self.server = Smo.Server(self.connection) self.database =", "1 self.put_patch(patch) if not success and self.stop_on_error: self.logger.critical(MsSqlVersion.bcolors.WARNING + 'Execution", "exit(2) import System clr.AddReference('Microsoft.SqlServer.Smo') clr.AddReference('Microsoft.SqlServer.SqlEnum') clr.AddReference('Microsoft.SqlServer.ConnectionInfo') import Microsoft.SqlServer.Management.Smo as Smo", "= '\\033[0m' BOLD = '\\033[1m' def __init__(self, connection_string, patch_dir='.', exclude_pattern=None,", "is None or parser_args.directory is None: parser.print_help() exit(1) # logging", "case_insensitive self.record_files_only = record_files_only self.executed_count = 0 self.logger = logging.NullHandler()", "expression the patch files should match :param logger: Logger that", "logger.addHandler(fh) ch = logging.StreamHandler() ch.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')) logger.setLevel(logging.DEBUG if parser_args.debug", "identity(1, 1), name varchar(100) not null, applied_at datetime not null);", "ENDC = '\\033[0m' BOLD = '\\033[1m' def __init__(self, connection_string, patch_dir='.',", "get_applied_patches(self): rows = self.database.ExecuteWithResults('select name from [_patch_history];').Tables[0].Rows return set([row['name'] for", "success: self.executed_count += 1 self.put_patch(patch) if not success and self.stop_on_error:", "= '\\033[93m' FAIL = '\\033[91m' ENDC = '\\033[0m' BOLD =", "that is used for logging :param stop_on_error: Stop execution on", "if not success and self.stop_on_error: self.logger.critical(MsSqlVersion.bcolors.WARNING + 'Execution stopped. 
Please", "logger=logger) if parser_args.noexecute: for patch in sqlvc.get_pending_patches(): logger.info(' ' +", "' + e.clsException.InnerException.InnerException.Message self.logger.error('[%s] %s (%s)' % (self.database.Name, full_name, message))", "in rfc1738 url format :param patch_dir: Patch directory with .sql", "'\\033[92m' WARNING = '\\033[93m' FAIL = '\\033[91m' ENDC = '\\033[0m'", "%H:%M:%S') if self.record_files_only: file = os.path.basename(file) sql = 'insert [_patch_history]", "in _exclude_list or not _file.lower().endswith('.sql') or (self.exclude_pattern and re.search(self.exclude_pattern, file))):", "help='skips files match to regular expression') parser.add_argument('--record-files-only', '-rfo', action='store_true', dest='record_files_only',", "['MsSqlVersion'] class ScriptExecutionError(Exception): pass class MsSqlVersion(object): \"\"\" SQL Server patch", "import logging import os import re import sys import urlparse", "as Common __author__ = '<NAME>' __copyright__ = 'Copyright (c) 2015-2016,", "= self.get_pending_patches() for patch in patches: self.logger.info('Add file %s' %", "None: parser.print_help() exit(1) # logging logger = logging.getLogger('mssql') if parser_args.log:", "rfc1738 url format, required') parser.add_argument('--directory', '-d', dest='directory', action='store', default='.', help='directory", "clr.AddReference('Microsoft.SqlServer.SqlEnum') clr.AddReference('Microsoft.SqlServer.ConnectionInfo') import Microsoft.SqlServer.Management.Smo as Smo import Microsoft.SqlServer.Management.Common as Common", "execute_file(self, file): \"\"\"Executes file against database in transaction, returns True", "\"\"\" database.ExecuteNonQuery(sql) return exists def get_cmd_line_parser(): \"\"\"Get initialized argparse.ArgumentParser object\"\"\"", "[os.path.basename(f) for f in applied_patches] patches = self._get_sql_files_from_dir(applied_patches) patches.sort() return", "if any script fails') parser.add_argument('--exclude-pattern', 
'-ep', dest='exclude_pattern', help='skips files match", "url = urlparse.urlparse(connection_string) is_local_login = not url.username self.connection = Common.ServerConnection(LoginSecure=is_local_login,", "handle sqlvc = MsSqlVersion(parser_args.connection, parser_args.directory, exclude_pattern=parser_args.exclude_pattern, stop_on_error=parser_args.stop_on_error, case_insensitive=parser_args.case_insensitive, record_files_only=parser_args.record_files_only, logger=logger)", "dest='noexecute', default=False, help='displays pending script files with no execution') parser.add_argument('--noexecute-fill',", "\"patchname.sql\" is the same') parser.add_argument('--debug', action='store_true', dest='debug', default=False, help='enables debug", "created _patch_history table' % (self.database.Name,)) def __del__(self): if self.server: self.server.ConnectionContext.Disconnect()", "epilog='''Example: %(prog)s -c \"mssql://sa:123@host\\instance/database\" -d \"D:/1/project/patch\"''') parser.add_argument('--connection', '-c', required=True, dest='connection',", "or parser_args.directory is None: parser.print_help() exit(1) # logging logger =", "= get_cmd_line_parser() parser_args = parser.parse_args() if parser_args.connection is None or", "-*- \"\"\" mssqlvc ~~~~~~~ Database version control utility for Microsoft", "not is_local_login: self.connection.Login = url.username self.connection.Password = <PASSWORD> self.server =", "= [os.path.basename(f) for f in applied_patches] patches = self._get_sql_files_from_dir(applied_patches) patches.sort()", "Exception as e: self.connection.RollBackTransaction() self.logger.error('Exception on %s' % (file,)) message", "= exclude_pattern self.patch_dir = patch_dir self.stop_on_error = stop_on_error self.case_insensitive =", "default=False, help='stops execution if any script fails') parser.add_argument('--exclude-pattern', '-ep', dest='exclude_pattern',", "= \"\"\" create table [_patch_history] (id int not null identity(1,", 
"ch.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')) logger.setLevel(logging.DEBUG if parser_args.debug else logging.INFO) logger.addHandler(ch) #", "'\\033[94m' OKGREEN = '\\033[92m' WARNING = '\\033[93m' FAIL = '\\033[91m'", "self.connection.RollBackTransaction() self.logger.error('Exception on %s' % (file,)) message = e.message or", "with patch files') parser.add_argument('--log', '-l', dest='log', action='store', help='log file') parser.add_argument('--noexecute',", "null identity(1, 1), name varchar(100) not null, applied_at datetime not" ]
[ "* header1 * header2 * ***************************** * stub1 * 0.00", "<td>2</td> <td>3.333</td> </tr> </table> \"\"\" #the previous has significant trailing", "#print len(actual), len(desired) assert_equal(actual, desired) def test_customlabel(self): # Limited test", "#print(desired) #print len(actual), len(desired) assert_equal(actual, desired) def test_customlabel(self): # Limited", "cell.data is np.nan: return 'missing' class TestCell(object): def test_celldata(self): celldata", "stub1 * -- * 1 * * stub2 * 2.00", "header_align = 'r', data_aligns = \"r\", stubs_align = \"l\", fmt", "\\n</tr>\\n<tr>\\n <th>stub2</th> <td>2</td> <td>3.333</td> \\n</tr>\\n</table>\\n''' actual = '\\n%s\\n' % tbl.as_html()", "custom labeling tbl = SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1) tbl[1][1].data =", "txt_fmt=txt_fmt1) tbl[1][1].data = np.nan tbl.label_cells(custom_labeller) #print([[c.datatype for c in row]", "' *', table_dec_above='*', table_dec_below='*', header_dec_below='*', header_fmt = '%s', stub_fmt =", "desired) def test_html_fmt1(self): # Limited test of custom html_fmt desired", "tbl = SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1, ltx_fmt=ltx_fmt1, html_fmt=html_fmt1) def custom_labeller(cell):", "tbl.as_latex_tabular() #print(actual) #print(desired) assert_equal(actual, desired) def test_html_fmt1(self): # Limited test", "np.nan tbl.label_cells(custom_labeller) #print([[c.datatype for c in row] for row in", "test1header = ('header1', 'header2') #test1header = ('header1\\nheader1a', 'header2\\nheader2a') tbl =", "', row_pre = '* ', row_post = ' *', table_dec_above='*',", "* stub1 * -- * 1 * * stub2 *", "def test_customlabel(self): # Limited test of custom custom labeling tbl", "* -- * 1 * * stub2 * 2.00 *", "& \\textbf{header2} \\\\ \\midrule \\textbf{stub1} & 0.0 & 1 \\\\", "len(actual), len(desired) assert_equal(actual, desired) def test_customlabel(self): # Limited test of", "\\toprule & \\textbf{header1} & 
\\textbf{header2} \\\\ \\midrule \\textbf{stub1} & 0.0", "= '''\\n<table class=\"simpletable\">\\n<tr>\\n <td></td> <th>header1</th> <th>header2</th>\\n</tr>\\n<tr>\\n <th>stub1</th> <td>0.0</td> <td>1</td> \\n</tr>\\n<tr>\\n", "<td></td> <th>header1</th> <th>header2</th> </tr> <tr> <th>stub1</th> <td>0.0</td> <td>1</td> </tr> <tr>", "* header2 * ***************************** * stub1 * 0.00 * 1", "= r\"\"\" \\begin{center} \\begin{tabular}{lcc} \\toprule & \\textbf{header1} & \\textbf{header2} \\\\", "= \"r\", stubs_align = \"l\", fmt = 'txt' ) cell0data", "import absolute_import from statsmodels.compat.python import zip import numpy as np", "data_aligns = \"r\", stubs_align = \"l\", fmt = 'txt' )", "import Cell, SimpleTable from statsmodels.iolib.table import default_latex_fmt from statsmodels.iolib.table import", "data_fmts = ['%0.2f', '%d'], empty_cell = ' ', colwidths =", "= \"l\", fmt = 'txt' ) cell0data = 0.0000 cell1data", "2.00 * 3 * ***************************** \"\"\" actual = '\\n%s\\n' %", "('header1\\nheader1a', 'header2\\nheader2a') tbl = SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1, ltx_fmt=ltx_fmt1, html_fmt=html_fmt1)", "0.00 * 1 * * stub2 * 2.00 * 3", "0.0000 cell1data = 1 row0data = [cell0data, cell1data] row1data =", "***************************** * stub1 * 0.00 * 1 * * stub2", "<th>stub2</th> <td>2</td> <td>3.333</td> </tr> </table> \"\"\" #the previous has significant", "actual = '\\n'.join((line.rstrip() for line in actual.split('\\n'))) #print(actual) #print(desired) #print", "& 2 & 3.333 \\\\ \\bottomrule \\end{tabular} \\end{center} \"\"\" actual", "('stub1', 'stub2') test1header = ('header1', 'header2') #test1header = ('header1\\nheader1a', 'header2\\nheader2a')", "\"r\", stubs_align = \"l\", fmt = 'txt' ) cell0data =", "0.0 & 1 \\\\ \\textbf{stub2} & 2 & 3.333 \\\\", "\\begin{tabular}{lcc} \\toprule & \\textbf{header1} & \\textbf{header2} \\\\ \\midrule \\textbf{stub1} &", "actual = '\\n%s\\n' % tbl.as_text() 
#print('actual') #print(actual) #print('desired') #print(desired) assert_equal(actual,", "#print('actual') #print(actual) #print('desired') #print(desired) assert_equal(actual, desired) def test_ltx_fmt1(self): # Limited", "\\n</tr>\\n</table>\\n''' actual = '\\n%s\\n' % tbl.as_html() actual = '\\n'.join((line.rstrip() for", "tbl = SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1) tbl[1][1].data = np.nan tbl.label_cells(custom_labeller)", "cell, datum in zip(cells, celldata): assert_equal(cell.data, datum) class TestSimpleTable(object): def", "Limited test of custom txt_fmt desired = \"\"\" ***************************** *", "datum) class TestSimpleTable(object): def test_txt_fmt1(self): # Limited test of custom", "default_html_fmt ltx_fmt1 = default_latex_fmt.copy() html_fmt1 = default_html_fmt.copy() txt_fmt1 = dict(", "import zip import numpy as np from numpy.testing import assert_equal", "<tr> <th>stub1</th> <td>0.0</td> <td>1</td> </tr> <tr> <th>stub2</th> <td>2</td> <td>3.333</td> </tr>", "<tr> <th>stub2</th> <td>2</td> <td>3.333</td> </tr> </table> \"\"\" #the previous has", "[ row0data, row1data ] test1stubs = ('stub1', 'stub2') test1header =", "Cell, SimpleTable from statsmodels.iolib.table import default_latex_fmt from statsmodels.iolib.table import default_html_fmt", "', row_post = ' *', table_dec_above='*', table_dec_below='*', header_dec_below='*', header_fmt =", "& 3.333 \\\\ \\bottomrule \\end{tabular} \\end{center} \"\"\" actual = '\\n%s\\n'", "from statsmodels.iolib.table import default_html_fmt ltx_fmt1 = default_latex_fmt.copy() html_fmt1 = default_html_fmt.copy()", "for c in row] for row in tbl]) desired =", "header2 * ***************************** * stub1 * 0.00 * 1 *", "of custom txt_fmt desired = \"\"\" ***************************** * * header1", ":see: http://agiletesting.blogspot.com/2005/01/python-unit-testing-part-1-unittest.html :see: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/305292 ''' from __future__ 
import absolute_import from", "desired) def test_customlabel(self): # Limited test of custom custom labeling", "= SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1, ltx_fmt=ltx_fmt1, html_fmt=html_fmt1) def custom_labeller(cell): if", "row_post = ' *', table_dec_above='*', table_dec_below='*', header_dec_below='*', header_fmt = '%s',", "* header2 * ***************************** * stub1 * -- * 1", "<tr> <td></td> <th>header1</th> <th>header2</th> </tr> <tr> <th>stub1</th> <td>0.0</td> <td>1</td> </tr>", "= '%s', stub_fmt = '%s', title_align='r', header_align = 'r', data_aligns", "tbl.label_cells(custom_labeller) #print([[c.datatype for c in row] for row in tbl])", "desired = r\"\"\" \\begin{center} \\begin{tabular}{lcc} \\toprule & \\textbf{header1} & \\textbf{header2}", "['%0.2f', '%d'], empty_cell = ' ', colwidths = 1, colsep='", "statsmodels.iolib.table import default_html_fmt ltx_fmt1 = default_latex_fmt.copy() html_fmt1 = default_html_fmt.copy() txt_fmt1", "import numpy as np from numpy.testing import assert_equal __docformat__ =", "#the previous has significant trailing whitespace that got removed #desired", "<th>header1</th> <th>header2</th>\\n</tr>\\n<tr>\\n <th>stub1</th> <td>0.0</td> <td>1</td> \\n</tr>\\n<tr>\\n <th>stub2</th> <td>2</td> <td>3.333</td> \\n</tr>\\n</table>\\n'''", "#print('desired') #print(desired) assert_equal(actual, desired) def test_ltx_fmt1(self): # Limited test of", "#print(actual) #print(desired) #print len(actual), len(desired) assert_equal(actual, desired) def test_customlabel(self): #", "http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/305292 ''' from __future__ import absolute_import from statsmodels.compat.python import zip", "= [ row0data, row1data ] test1stubs = ('stub1', 'stub2') test1header", "</tr> </table> \"\"\" #the previous has significant trailing whitespace that", "an intro to unittest :see: http://agiletesting.blogspot.com/2005/01/python-unit-testing-part-1-unittest.html :see: 
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/305292 ''' from", "colwidths = 1, colsep=' * ', row_pre = '* ',", "desired = \"\"\" <table class=\"simpletable\"> <tr> <td></td> <th>header1</th> <th>header2</th> </tr>", "html_fmt1 = default_html_fmt.copy() txt_fmt1 = dict( data_fmts = ['%0.2f', '%d'],", "* * stub2 * 2.00 * 3 * ***************************** \"\"\"", "***************************** * stub1 * -- * 1 * * stub2", "html_fmt desired = \"\"\" <table class=\"simpletable\"> <tr> <td></td> <th>header1</th> <th>header2</th>", "previous has significant trailing whitespace that got removed #desired =", "test1stubs = ('stub1', 'stub2') test1header = ('header1', 'header2') #test1header =", "in zip(cells, celldata): assert_equal(cell.data, datum) class TestSimpleTable(object): def test_txt_fmt1(self): #", "for an intro to unittest :see: http://agiletesting.blogspot.com/2005/01/python-unit-testing-part-1-unittest.html :see: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/305292 '''", "* 3 * ***************************** \"\"\" actual = '\\n%s\\n' % tbl.as_text(missing='--')", "<table class=\"simpletable\"> <tr> <td></td> <th>header1</th> <th>header2</th> </tr> <tr> <th>stub1</th> <td>0.0</td>", "\"\"\" #the previous has significant trailing whitespace that got removed", "\"\"\" actual = '\\n%s\\n' % tbl.as_latex_tabular() #print(actual) #print(desired) assert_equal(actual, desired)", "test1header, test1stubs, txt_fmt=txt_fmt1) tbl[1][1].data = np.nan tbl.label_cells(custom_labeller) #print([[c.datatype for c", "table.py. 
:see: http://docs.python.org/lib/minimal-example.html for an intro to unittest :see: http://agiletesting.blogspot.com/2005/01/python-unit-testing-part-1-unittest.html", "<th>stub1</th> <td>0.0</td> <td>1</td> </tr> <tr> <th>stub2</th> <td>2</td> <td>3.333</td> </tr> </table>", "en\" from statsmodels.iolib.table import Cell, SimpleTable from statsmodels.iolib.table import default_latex_fmt", "tbl]) desired = \"\"\" ***************************** * * header1 * header2", "from statsmodels.iolib.table import default_latex_fmt from statsmodels.iolib.table import default_html_fmt ltx_fmt1 =", "absolute_import from statsmodels.compat.python import zip import numpy as np from", "'* ', row_post = ' *', table_dec_above='*', table_dec_below='*', header_dec_below='*', header_fmt", "'r', data_aligns = \"r\", stubs_align = \"l\", fmt = 'txt'", "row in tbl]) desired = \"\"\" ***************************** * * header1", "def test_txt_fmt1(self): # Limited test of custom txt_fmt desired =", "3.333 \\\\ \\bottomrule \\end{tabular} \\end{center} \"\"\" actual = '\\n%s\\n' %", "cells = [Cell(datum, datatype=i % 2) for i, datum in", "\\textbf{header1} & \\textbf{header2} \\\\ \\midrule \\textbf{stub1} & 0.0 & 1", "table_dec_below='*', header_dec_below='*', header_fmt = '%s', stub_fmt = '%s', title_align='r', header_align", "' ', colwidths = 1, colsep=' * ', row_pre =", "empty_cell = ' ', colwidths = 1, colsep=' * ',", "<th>stub2</th> <td>2</td> <td>3.333</td> \\n</tr>\\n</table>\\n''' actual = '\\n%s\\n' % tbl.as_html() actual", "[Cell(datum, datatype=i % 2) for i, datum in enumerate(celldata)] for", "</tr> <tr> <th>stub1</th> <td>0.0</td> <td>1</td> </tr> <tr> <th>stub2</th> <td>2</td> <td>3.333</td>", "stub2 * 2.00 * 3 * ***************************** \"\"\" actual =", "2) for i, datum in enumerate(celldata)] for cell, datum in", "tbl.as_text() #print('actual') #print(actual) #print('desired') #print(desired) assert_equal(actual, desired) def test_ltx_fmt1(self): #", "row] for row in 
tbl]) desired = \"\"\" ***************************** *", "cell1data = 1 row0data = [cell0data, cell1data] row1data = [2,", "dict( data_fmts = ['%0.2f', '%d'], empty_cell = ' ', colwidths", "in tbl]) desired = \"\"\" ***************************** * * header1 *", "<td>0.0</td> <td>1</td> \\n</tr>\\n<tr>\\n <th>stub2</th> <td>2</td> <td>3.333</td> \\n</tr>\\n</table>\\n''' actual = '\\n%s\\n'", "= 0.0000 cell1data = 1 row0data = [cell0data, cell1data] row1data", "assert_equal(cell.data, datum) class TestSimpleTable(object): def test_txt_fmt1(self): # Limited test of", "tbl[1][1].data = np.nan tbl.label_cells(custom_labeller) #print([[c.datatype for c in row] for", "#print(actual) #print(desired) assert_equal(actual, desired) def test_html_fmt1(self): # Limited test of", "'missing' class TestCell(object): def test_celldata(self): celldata = cell0data, cell1data, row1data[0],", "\"\"\" ***************************** * * header1 * header2 * ***************************** *", "row1data[0], row1data[1] cells = [Cell(datum, datatype=i % 2) for i,", "'%d'], empty_cell = ' ', colwidths = 1, colsep=' *", "'\\n'.join((line.rstrip() for line in actual.split('\\n'))) #print(actual) #print(desired) #print len(actual), len(desired)", "<td>0.0</td> <td>1</td> </tr> <tr> <th>stub2</th> <td>2</td> <td>3.333</td> </tr> </table> \"\"\"", "= ('stub1', 'stub2') test1header = ('header1', 'header2') #test1header = ('header1\\nheader1a',", "actual = '\\n%s\\n' % tbl.as_latex_tabular() #print(actual) #print(desired) assert_equal(actual, desired) def", "datum in zip(cells, celldata): assert_equal(cell.data, datum) class TestSimpleTable(object): def test_txt_fmt1(self):", "= cell0data, cell1data, row1data[0], row1data[1] cells = [Cell(datum, datatype=i %", "* * header1 * header2 * ***************************** * stub1 *", "Limited test of custom ltx_fmt desired = r\"\"\" \\begin{center} \\begin{tabular}{lcc}", "ltx_fmt desired = r\"\"\" \\begin{center} \\begin{tabular}{lcc} \\toprule & 
\\textbf{header1} &", "<th>stub1</th> <td>0.0</td> <td>1</td> \\n</tr>\\n<tr>\\n <th>stub2</th> <td>2</td> <td>3.333</td> \\n</tr>\\n</table>\\n''' actual =", ") cell0data = 0.0000 cell1data = 1 row0data = [cell0data,", "<td>1</td> \\n</tr>\\n<tr>\\n <th>stub2</th> <td>2</td> <td>3.333</td> \\n</tr>\\n</table>\\n''' actual = '\\n%s\\n' %", "import default_html_fmt ltx_fmt1 = default_latex_fmt.copy() html_fmt1 = default_html_fmt.copy() txt_fmt1 =", "\\bottomrule \\end{tabular} \\end{center} \"\"\" actual = '\\n%s\\n' % tbl.as_latex_tabular() #print(actual)", "table_dec_above='*', table_dec_below='*', header_dec_below='*', header_fmt = '%s', stub_fmt = '%s', title_align='r',", "test of custom ltx_fmt desired = r\"\"\" \\begin{center} \\begin{tabular}{lcc} \\toprule", "default_latex_fmt from statsmodels.iolib.table import default_html_fmt ltx_fmt1 = default_latex_fmt.copy() html_fmt1 =", "default_html_fmt.copy() txt_fmt1 = dict( data_fmts = ['%0.2f', '%d'], empty_cell =", "\"restructuredtext en\" from statsmodels.iolib.table import Cell, SimpleTable from statsmodels.iolib.table import", "3.333] table1data = [ row0data, row1data ] test1stubs = ('stub1',", "test of custom txt_fmt desired = \"\"\" ***************************** * *", "'\\n%s\\n' % tbl.as_latex_tabular() #print(actual) #print(desired) assert_equal(actual, desired) def test_html_fmt1(self): #", "= np.nan tbl.label_cells(custom_labeller) #print([[c.datatype for c in row] for row", "SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1, ltx_fmt=ltx_fmt1, html_fmt=html_fmt1) def custom_labeller(cell): if cell.data", "test_celldata(self): celldata = cell0data, cell1data, row1data[0], row1data[1] cells = [Cell(datum,", "r\"\"\" \\begin{center} \\begin{tabular}{lcc} \\toprule & \\textbf{header1} & \\textbf{header2} \\\\ \\midrule", "is np.nan: return 'missing' class TestCell(object): def test_celldata(self): celldata =", "% tbl.as_latex_tabular() #print(actual) #print(desired) assert_equal(actual, 
desired) def test_html_fmt1(self): # Limited", "= '\\n%s\\n' % tbl.as_html() actual = '\\n'.join((line.rstrip() for line in", "***************************** * * header1 * header2 * ***************************** * stub1", "statsmodels.iolib.table import Cell, SimpleTable from statsmodels.iolib.table import default_latex_fmt from statsmodels.iolib.table", "__future__ import absolute_import from statsmodels.compat.python import zip import numpy as", "= 'r', data_aligns = \"r\", stubs_align = \"l\", fmt =", "that got removed #desired = '''\\n<table class=\"simpletable\">\\n<tr>\\n <td></td> <th>header1</th> <th>header2</th>\\n</tr>\\n<tr>\\n", "from statsmodels.compat.python import zip import numpy as np from numpy.testing", "statsmodels.iolib.table import default_latex_fmt from statsmodels.iolib.table import default_html_fmt ltx_fmt1 = default_latex_fmt.copy()", "'stub2') test1header = ('header1', 'header2') #test1header = ('header1\\nheader1a', 'header2\\nheader2a') tbl", "& 1 \\\\ \\textbf{stub2} & 2 & 3.333 \\\\ \\bottomrule", "http://agiletesting.blogspot.com/2005/01/python-unit-testing-part-1-unittest.html :see: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/305292 ''' from __future__ import absolute_import from statsmodels.compat.python", "http://docs.python.org/lib/minimal-example.html for an intro to unittest :see: http://agiletesting.blogspot.com/2005/01/python-unit-testing-part-1-unittest.html :see: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/305292", "[2, 3.333] table1data = [ row0data, row1data ] test1stubs =", "\\end{tabular} \\end{center} \"\"\" actual = '\\n%s\\n' % tbl.as_latex_tabular() #print(actual) #print(desired)", "* 2.00 * 3 * ***************************** \"\"\" actual = '\\n%s\\n'", "*', table_dec_above='*', table_dec_below='*', header_dec_below='*', header_fmt = '%s', stub_fmt = '%s',", "test_html_fmt1(self): # Limited test of custom html_fmt desired = \"\"\"", "1, colsep=' * ', row_pre = '* ', row_post =", "import 
default_latex_fmt from statsmodels.iolib.table import default_html_fmt ltx_fmt1 = default_latex_fmt.copy() html_fmt1", "row0data = [cell0data, cell1data] row1data = [2, 3.333] table1data =", "test_customlabel(self): # Limited test of custom custom labeling tbl =", "1 row0data = [cell0data, cell1data] row1data = [2, 3.333] table1data", "test1stubs, txt_fmt=txt_fmt1, ltx_fmt=ltx_fmt1, html_fmt=html_fmt1) def custom_labeller(cell): if cell.data is np.nan:", "desired = \"\"\" ***************************** * * header1 * header2 *", "= [Cell(datum, datatype=i % 2) for i, datum in enumerate(celldata)]", "for cell, datum in zip(cells, celldata): assert_equal(cell.data, datum) class TestSimpleTable(object):", "* ***************************** * stub1 * 0.00 * 1 * *", "custom txt_fmt desired = \"\"\" ***************************** * * header1 *", "test of custom custom labeling tbl = SimpleTable(table1data, test1header, test1stubs,", "= [2, 3.333] table1data = [ row0data, row1data ] test1stubs", "# Limited test of custom ltx_fmt desired = r\"\"\" \\begin{center}", ":see: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/305292 ''' from __future__ import absolute_import from statsmodels.compat.python import", "stub_fmt = '%s', title_align='r', header_align = 'r', data_aligns = \"r\",", "for i, datum in enumerate(celldata)] for cell, datum in zip(cells,", "html_fmt=html_fmt1) def custom_labeller(cell): if cell.data is np.nan: return 'missing' class", "\\end{center} \"\"\" actual = '\\n%s\\n' % tbl.as_latex_tabular() #print(actual) #print(desired) assert_equal(actual,", "\\\\ \\bottomrule \\end{tabular} \\end{center} \"\"\" actual = '\\n%s\\n' % tbl.as_latex_tabular()", "''' from __future__ import absolute_import from statsmodels.compat.python import zip import", "custom custom labeling tbl = SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1) tbl[1][1].data", "= 'txt' ) cell0data = 0.0000 cell1data = 1 row0data", "= SimpleTable(table1data, test1header, 
test1stubs, txt_fmt=txt_fmt1) tbl[1][1].data = np.nan tbl.label_cells(custom_labeller) #print([[c.datatype", "Limited test of custom custom labeling tbl = SimpleTable(table1data, test1header,", "# Limited test of custom txt_fmt desired = \"\"\" *****************************", "def test_celldata(self): celldata = cell0data, cell1data, row1data[0], row1data[1] cells =", "# Limited test of custom custom labeling tbl = SimpleTable(table1data,", "__docformat__ = \"restructuredtext en\" from statsmodels.iolib.table import Cell, SimpleTable from", "& 0.0 & 1 \\\\ \\textbf{stub2} & 2 & 3.333", "#test1header = ('header1\\nheader1a', 'header2\\nheader2a') tbl = SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1,", "in enumerate(celldata)] for cell, datum in zip(cells, celldata): assert_equal(cell.data, datum)", "test1stubs, txt_fmt=txt_fmt1) tbl[1][1].data = np.nan tbl.label_cells(custom_labeller) #print([[c.datatype for c in", "stub1 * 0.00 * 1 * * stub2 * 2.00", "'txt' ) cell0data = 0.0000 cell1data = 1 row0data =", "#print(actual) #print('desired') #print(desired) assert_equal(actual, desired) def test_ltx_fmt1(self): # Limited test", "\"l\", fmt = 'txt' ) cell0data = 0.0000 cell1data =", "np from numpy.testing import assert_equal __docformat__ = \"restructuredtext en\" from", "of custom custom labeling tbl = SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1)", "zip import numpy as np from numpy.testing import assert_equal __docformat__", "default_latex_fmt.copy() html_fmt1 = default_html_fmt.copy() txt_fmt1 = dict( data_fmts = ['%0.2f',", "'''\\n<table class=\"simpletable\">\\n<tr>\\n <td></td> <th>header1</th> <th>header2</th>\\n</tr>\\n<tr>\\n <th>stub1</th> <td>0.0</td> <td>1</td> \\n</tr>\\n<tr>\\n <th>stub2</th>", "= ('header1\\nheader1a', 'header2\\nheader2a') tbl = SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1, ltx_fmt=ltx_fmt1,", "tbl.as_html() actual = '\\n'.join((line.rstrip() for line in actual.split('\\n'))) 
#print(actual) #print(desired)", "tests table.py. :see: http://docs.python.org/lib/minimal-example.html for an intro to unittest :see:", "cell0data, cell1data, row1data[0], row1data[1] cells = [Cell(datum, datatype=i % 2)", "'header2\\nheader2a') tbl = SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1, ltx_fmt=ltx_fmt1, html_fmt=html_fmt1) def", "* stub2 * 2.00 * 3 * ***************************** \"\"\" actual", "row0data, row1data ] test1stubs = ('stub1', 'stub2') test1header = ('header1',", "\"\"\" <table class=\"simpletable\"> <tr> <td></td> <th>header1</th> <th>header2</th> </tr> <tr> <th>stub1</th>", "header2 * ***************************** * stub1 * -- * 1 *", "ltx_fmt=ltx_fmt1, html_fmt=html_fmt1) def custom_labeller(cell): if cell.data is np.nan: return 'missing'", "txt_fmt desired = \"\"\" ***************************** * * header1 * header2", "\\begin{center} \\begin{tabular}{lcc} \\toprule & \\textbf{header1} & \\textbf{header2} \\\\ \\midrule \\textbf{stub1}", "\\\\ \\textbf{stub2} & 2 & 3.333 \\\\ \\bottomrule \\end{tabular} \\end{center}", "* ', row_pre = '* ', row_post = ' *',", "cell0data = 0.0000 cell1data = 1 row0data = [cell0data, cell1data]", "* 0.00 * 1 * * stub2 * 2.00 *", "Limited test of custom html_fmt desired = \"\"\" <table class=\"simpletable\">", "return 'missing' class TestCell(object): def test_celldata(self): celldata = cell0data, cell1data,", "<td>3.333</td> \\n</tr>\\n</table>\\n''' actual = '\\n%s\\n' % tbl.as_html() actual = '\\n'.join((line.rstrip()", "2 & 3.333 \\\\ \\bottomrule \\end{tabular} \\end{center} \"\"\" actual =", "labeling tbl = SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1) tbl[1][1].data = np.nan", "= 1 row0data = [cell0data, cell1data] row1data = [2, 3.333]", "actual = '\\n%s\\n' % tbl.as_html() actual = '\\n'.join((line.rstrip() for line", "ltx_fmt1 = default_latex_fmt.copy() html_fmt1 = default_html_fmt.copy() txt_fmt1 = dict( data_fmts", "enumerate(celldata)] for cell, datum 
in zip(cells, celldata): assert_equal(cell.data, datum) class", "3 * ***************************** \"\"\" actual = '\\n%s\\n' % tbl.as_text(missing='--') assert_equal(actual,", "\\textbf{header2} \\\\ \\midrule \\textbf{stub1} & 0.0 & 1 \\\\ \\textbf{stub2}", "</tr> <tr> <th>stub2</th> <td>2</td> <td>3.333</td> </tr> </table> \"\"\" #the previous", "numpy.testing import assert_equal __docformat__ = \"restructuredtext en\" from statsmodels.iolib.table import", "3 * ***************************** \"\"\" actual = '\\n%s\\n' % tbl.as_text() #print('actual')", "header1 * header2 * ***************************** * stub1 * -- *", "<td>3.333</td> </tr> </table> \"\"\" #the previous has significant trailing whitespace", "* ***************************** \"\"\" actual = '\\n%s\\n' % tbl.as_text(missing='--') assert_equal(actual, desired)", "row1data = [2, 3.333] table1data = [ row0data, row1data ]", "#print(desired) assert_equal(actual, desired) def test_ltx_fmt1(self): # Limited test of custom", "* 1 * * stub2 * 2.00 * 3 *", "desired) def test_ltx_fmt1(self): # Limited test of custom ltx_fmt desired", "***************************** \"\"\" actual = '\\n%s\\n' % tbl.as_text() #print('actual') #print(actual) #print('desired')", "</table> \"\"\" #the previous has significant trailing whitespace that got", "celldata): assert_equal(cell.data, datum) class TestSimpleTable(object): def test_txt_fmt1(self): # Limited test", "custom_labeller(cell): if cell.data is np.nan: return 'missing' class TestCell(object): def", "'%s', title_align='r', header_align = 'r', data_aligns = \"r\", stubs_align =", "* stub1 * 0.00 * 1 * * stub2 *", "] test1stubs = ('stub1', 'stub2') test1header = ('header1', 'header2') #test1header", "assert_equal(actual, desired) def test_customlabel(self): # Limited test of custom custom", "<th>header2</th> </tr> <tr> <th>stub1</th> <td>0.0</td> <td>1</td> </tr> <tr> <th>stub2</th> <td>2</td>", "title_align='r', header_align = 'r', data_aligns = \"r\", stubs_align = 
\"l\",", "c in row] for row in tbl]) desired = \"\"\"", "line in actual.split('\\n'))) #print(actual) #print(desired) #print len(actual), len(desired) assert_equal(actual, desired)", "class=\"simpletable\"> <tr> <td></td> <th>header1</th> <th>header2</th> </tr> <tr> <th>stub1</th> <td>0.0</td> <td>1</td>", "class TestSimpleTable(object): def test_txt_fmt1(self): # Limited test of custom txt_fmt", "test_ltx_fmt1(self): # Limited test of custom ltx_fmt desired = r\"\"\"", "header1 * header2 * ***************************** * stub1 * 0.00 *", "header_fmt = '%s', stub_fmt = '%s', title_align='r', header_align = 'r',", "in actual.split('\\n'))) #print(actual) #print(desired) #print len(actual), len(desired) assert_equal(actual, desired) def", "'header2') #test1header = ('header1\\nheader1a', 'header2\\nheader2a') tbl = SimpleTable(table1data, test1header, test1stubs,", "actual.split('\\n'))) #print(actual) #print(desired) #print len(actual), len(desired) assert_equal(actual, desired) def test_customlabel(self):", "from __future__ import absolute_import from statsmodels.compat.python import zip import numpy", "'\\n%s\\n' % tbl.as_text() #print('actual') #print(actual) #print('desired') #print(desired) assert_equal(actual, desired) def", "assert_equal(actual, desired) def test_html_fmt1(self): # Limited test of custom html_fmt", "= dict( data_fmts = ['%0.2f', '%d'], empty_cell = ' ',", "\\\\ \\midrule \\textbf{stub1} & 0.0 & 1 \\\\ \\textbf{stub2} &", "colsep=' * ', row_pre = '* ', row_post = '", "#print([[c.datatype for c in row] for row in tbl]) desired", "* ***************************** \"\"\" actual = '\\n%s\\n' % tbl.as_text() #print('actual') #print(actual)", "* 3 * ***************************** \"\"\" actual = '\\n%s\\n' % tbl.as_text()", "= ' *', table_dec_above='*', table_dec_below='*', header_dec_below='*', header_fmt = '%s', stub_fmt", "= ('header1', 'header2') #test1header = ('header1\\nheader1a', 'header2\\nheader2a') tbl = SimpleTable(table1data,", "= 
['%0.2f', '%d'], empty_cell = ' ', colwidths = 1,", "unittest :see: http://agiletesting.blogspot.com/2005/01/python-unit-testing-part-1-unittest.html :see: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/305292 ''' from __future__ import absolute_import", "to unittest :see: http://agiletesting.blogspot.com/2005/01/python-unit-testing-part-1-unittest.html :see: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/305292 ''' from __future__ import", "for row in tbl]) desired = \"\"\" ***************************** * *", "class=\"simpletable\">\\n<tr>\\n <td></td> <th>header1</th> <th>header2</th>\\n</tr>\\n<tr>\\n <th>stub1</th> <td>0.0</td> <td>1</td> \\n</tr>\\n<tr>\\n <th>stub2</th> <td>2</td>", "= \"\"\" ***************************** * * header1 * header2 * *****************************", "% tbl.as_html() actual = '\\n'.join((line.rstrip() for line in actual.split('\\n'))) #print(actual)", "-- * 1 * * stub2 * 2.00 * 3", "from numpy.testing import assert_equal __docformat__ = \"restructuredtext en\" from statsmodels.iolib.table", "\"\"\" actual = '\\n%s\\n' % tbl.as_text() #print('actual') #print(actual) #print('desired') #print(desired)", "% 2) for i, datum in enumerate(celldata)] for cell, datum", "% tbl.as_text() #print('actual') #print(actual) #print('desired') #print(desired) assert_equal(actual, desired) def test_ltx_fmt1(self):", "test of custom html_fmt desired = \"\"\" <table class=\"simpletable\"> <tr>", ":see: http://docs.python.org/lib/minimal-example.html for an intro to unittest :see: http://agiletesting.blogspot.com/2005/01/python-unit-testing-part-1-unittest.html :see:", "got removed #desired = '''\\n<table class=\"simpletable\">\\n<tr>\\n <td></td> <th>header1</th> <th>header2</th>\\n</tr>\\n<tr>\\n <th>stub1</th>", "= \"restructuredtext en\" from statsmodels.iolib.table import Cell, SimpleTable from statsmodels.iolib.table", "cell1data] row1data = [2, 3.333] table1data = [ row0data, row1data", "assert_equal __docformat__ = 
\"restructuredtext en\" from statsmodels.iolib.table import Cell, SimpleTable", "def custom_labeller(cell): if cell.data is np.nan: return 'missing' class TestCell(object):", "assert_equal(actual, desired) def test_ltx_fmt1(self): # Limited test of custom ltx_fmt", "def test_ltx_fmt1(self): # Limited test of custom ltx_fmt desired =", "TestSimpleTable(object): def test_txt_fmt1(self): # Limited test of custom txt_fmt desired", "in row] for row in tbl]) desired = \"\"\" *****************************", "whitespace that got removed #desired = '''\\n<table class=\"simpletable\">\\n<tr>\\n <td></td> <th>header1</th>", "if cell.data is np.nan: return 'missing' class TestCell(object): def test_celldata(self):", "<th>header2</th>\\n</tr>\\n<tr>\\n <th>stub1</th> <td>0.0</td> <td>1</td> \\n</tr>\\n<tr>\\n <th>stub2</th> <td>2</td> <td>3.333</td> \\n</tr>\\n</table>\\n''' actual", "# Limited test of custom html_fmt desired = \"\"\" <table", "np.nan: return 'missing' class TestCell(object): def test_celldata(self): celldata = cell0data,", "removed #desired = '''\\n<table class=\"simpletable\">\\n<tr>\\n <td></td> <th>header1</th> <th>header2</th>\\n</tr>\\n<tr>\\n <th>stub1</th> <td>0.0</td>", "len(desired) assert_equal(actual, desired) def test_customlabel(self): # Limited test of custom", "('header1', 'header2') #test1header = ('header1\\nheader1a', 'header2\\nheader2a') tbl = SimpleTable(table1data, test1header,", "test1header, test1stubs, txt_fmt=txt_fmt1, ltx_fmt=ltx_fmt1, html_fmt=html_fmt1) def custom_labeller(cell): if cell.data is", "TestCell(object): def test_celldata(self): celldata = cell0data, cell1data, row1data[0], row1data[1] cells", "= '* ', row_post = ' *', table_dec_above='*', table_dec_below='*', header_dec_below='*',", "table1data = [ row0data, row1data ] test1stubs = ('stub1', 'stub2')", "#desired = '''\\n<table class=\"simpletable\">\\n<tr>\\n <td></td> <th>header1</th> <th>header2</th>\\n</tr>\\n<tr>\\n <th>stub1</th> <td>0.0</td> <td>1</td>", 
"SimpleTable(table1data, test1header, test1stubs, txt_fmt=txt_fmt1) tbl[1][1].data = np.nan tbl.label_cells(custom_labeller) #print([[c.datatype for", "trailing whitespace that got removed #desired = '''\\n<table class=\"simpletable\">\\n<tr>\\n <td></td>", "* header1 * header2 * ***************************** * stub1 * --", "& \\textbf{header1} & \\textbf{header2} \\\\ \\midrule \\textbf{stub1} & 0.0 &", "= '\\n'.join((line.rstrip() for line in actual.split('\\n'))) #print(actual) #print(desired) #print len(actual),", "numpy as np from numpy.testing import assert_equal __docformat__ = \"restructuredtext", "as np from numpy.testing import assert_equal __docformat__ = \"restructuredtext en\"", "\\textbf{stub1} & 0.0 & 1 \\\\ \\textbf{stub2} & 2 &", "class TestCell(object): def test_celldata(self): celldata = cell0data, cell1data, row1data[0], row1data[1]", "row1data ] test1stubs = ('stub1', 'stub2') test1header = ('header1', 'header2')", "datum in enumerate(celldata)] for cell, datum in zip(cells, celldata): assert_equal(cell.data,", "= [cell0data, cell1data] row1data = [2, 3.333] table1data = [", "\\textbf{stub2} & 2 & 3.333 \\\\ \\bottomrule \\end{tabular} \\end{center} \"\"\"", "txt_fmt1 = dict( data_fmts = ['%0.2f', '%d'], empty_cell = '", "\\midrule \\textbf{stub1} & 0.0 & 1 \\\\ \\textbf{stub2} & 2", "fmt = 'txt' ) cell0data = 0.0000 cell1data = 1", "SimpleTable from statsmodels.iolib.table import default_latex_fmt from statsmodels.iolib.table import default_html_fmt ltx_fmt1", "= default_latex_fmt.copy() html_fmt1 = default_html_fmt.copy() txt_fmt1 = dict( data_fmts =", "Unit tests table.py. :see: http://docs.python.org/lib/minimal-example.html for an intro to unittest", "stubs_align = \"l\", fmt = 'txt' ) cell0data = 0.0000", "= '%s', title_align='r', header_align = 'r', data_aligns = \"r\", stubs_align", "from statsmodels.iolib.table import Cell, SimpleTable from statsmodels.iolib.table import default_latex_fmt from", "''' Unit tests table.py. 
:see: http://docs.python.org/lib/minimal-example.html for an intro to", "= \"\"\" <table class=\"simpletable\"> <tr> <td></td> <th>header1</th> <th>header2</th> </tr> <tr>", "custom html_fmt desired = \"\"\" <table class=\"simpletable\"> <tr> <td></td> <th>header1</th>", "= 1, colsep=' * ', row_pre = '* ', row_post", "cell1data, row1data[0], row1data[1] cells = [Cell(datum, datatype=i % 2) for", "import assert_equal __docformat__ = \"restructuredtext en\" from statsmodels.iolib.table import Cell,", "[cell0data, cell1data] row1data = [2, 3.333] table1data = [ row0data,", "row1data[1] cells = [Cell(datum, datatype=i % 2) for i, datum", "statsmodels.compat.python import zip import numpy as np from numpy.testing import", "'%s', stub_fmt = '%s', title_align='r', header_align = 'r', data_aligns =", "#print(desired) assert_equal(actual, desired) def test_html_fmt1(self): # Limited test of custom", "def test_html_fmt1(self): # Limited test of custom html_fmt desired =", "has significant trailing whitespace that got removed #desired = '''\\n<table", "test_txt_fmt1(self): # Limited test of custom txt_fmt desired = \"\"\"", "1 * * stub2 * 2.00 * 3 * *****************************", "<td></td> <th>header1</th> <th>header2</th>\\n</tr>\\n<tr>\\n <th>stub1</th> <td>0.0</td> <td>1</td> \\n</tr>\\n<tr>\\n <th>stub2</th> <td>2</td> <td>3.333</td>", "', colwidths = 1, colsep=' * ', row_pre = '*", "significant trailing whitespace that got removed #desired = '''\\n<table class=\"simpletable\">\\n<tr>\\n", "i, datum in enumerate(celldata)] for cell, datum in zip(cells, celldata):", "1 \\\\ \\textbf{stub2} & 2 & 3.333 \\\\ \\bottomrule \\end{tabular}", "datatype=i % 2) for i, datum in enumerate(celldata)] for cell,", "zip(cells, celldata): assert_equal(cell.data, datum) class TestSimpleTable(object): def test_txt_fmt1(self): # Limited", "row_pre = '* ', row_post = ' *', table_dec_above='*', table_dec_below='*',", "custom ltx_fmt desired = r\"\"\" \\begin{center} \\begin{tabular}{lcc} 
\\toprule & \\textbf{header1}", "= '\\n%s\\n' % tbl.as_latex_tabular() #print(actual) #print(desired) assert_equal(actual, desired) def test_html_fmt1(self):", "intro to unittest :see: http://agiletesting.blogspot.com/2005/01/python-unit-testing-part-1-unittest.html :see: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/305292 ''' from __future__", "celldata = cell0data, cell1data, row1data[0], row1data[1] cells = [Cell(datum, datatype=i", "<th>header1</th> <th>header2</th> </tr> <tr> <th>stub1</th> <td>0.0</td> <td>1</td> </tr> <tr> <th>stub2</th>", "for line in actual.split('\\n'))) #print(actual) #print(desired) #print len(actual), len(desired) assert_equal(actual,", "of custom html_fmt desired = \"\"\" <table class=\"simpletable\"> <tr> <td></td>", "<td>2</td> <td>3.333</td> \\n</tr>\\n</table>\\n''' actual = '\\n%s\\n' % tbl.as_html() actual =", "<td>1</td> </tr> <tr> <th>stub2</th> <td>2</td> <td>3.333</td> </tr> </table> \"\"\" #the", "header_dec_below='*', header_fmt = '%s', stub_fmt = '%s', title_align='r', header_align =", "* ***************************** * stub1 * -- * 1 * *", "= ' ', colwidths = 1, colsep=' * ', row_pre", "= default_html_fmt.copy() txt_fmt1 = dict( data_fmts = ['%0.2f', '%d'], empty_cell", "'\\n%s\\n' % tbl.as_html() actual = '\\n'.join((line.rstrip() for line in actual.split('\\n')))", "txt_fmt=txt_fmt1, ltx_fmt=ltx_fmt1, html_fmt=html_fmt1) def custom_labeller(cell): if cell.data is np.nan: return", "of custom ltx_fmt desired = r\"\"\" \\begin{center} \\begin{tabular}{lcc} \\toprule &", "= '\\n%s\\n' % tbl.as_text() #print('actual') #print(actual) #print('desired') #print(desired) assert_equal(actual, desired)" ]
[ "\"\"\"Types for the Todoist component.\"\"\" from __future__ import annotations from", "date in a todoist api response.\"\"\" date: str is_recurring: bool", "class DueDate(TypedDict): \"\"\"Dict representing a due date in a todoist", "from __future__ import annotations from typing import TypedDict class DueDate(TypedDict):", "import TypedDict class DueDate(TypedDict): \"\"\"Dict representing a due date in", "a todoist api response.\"\"\" date: str is_recurring: bool lang: str", "annotations from typing import TypedDict class DueDate(TypedDict): \"\"\"Dict representing a", "from typing import TypedDict class DueDate(TypedDict): \"\"\"Dict representing a due", "a due date in a todoist api response.\"\"\" date: str", "DueDate(TypedDict): \"\"\"Dict representing a due date in a todoist api", "import annotations from typing import TypedDict class DueDate(TypedDict): \"\"\"Dict representing", "todoist api response.\"\"\" date: str is_recurring: bool lang: str string:", "<gh_stars>1000+ \"\"\"Types for the Todoist component.\"\"\" from __future__ import annotations", "\"\"\"Dict representing a due date in a todoist api response.\"\"\"", "due date in a todoist api response.\"\"\" date: str is_recurring:", "response.\"\"\" date: str is_recurring: bool lang: str string: str timezone:", "representing a due date in a todoist api response.\"\"\" date:", "str is_recurring: bool lang: str string: str timezone: str |", "the Todoist component.\"\"\" from __future__ import annotations from typing import", "TypedDict class DueDate(TypedDict): \"\"\"Dict representing a due date in a", "for the Todoist component.\"\"\" from __future__ import annotations from typing", "Todoist component.\"\"\" from __future__ import annotations from typing import TypedDict", "__future__ import annotations from typing import TypedDict class DueDate(TypedDict): \"\"\"Dict", "date: str is_recurring: bool lang: str string: str timezone: str", "api response.\"\"\" date: str is_recurring: bool lang: str 
string: str", "is_recurring: bool lang: str string: str timezone: str | None", "in a todoist api response.\"\"\" date: str is_recurring: bool lang:", "typing import TypedDict class DueDate(TypedDict): \"\"\"Dict representing a due date", "component.\"\"\" from __future__ import annotations from typing import TypedDict class" ]
[ "_zstd._ZSTD_c_dictIDFlag nbWorkers = _zstd._ZSTD_c_nbWorkers jobSize = _zstd._ZSTD_c_jobSize overlapLog = _zstd._ZSTD_c_overlapLog", "'get_frame_info', 'CParameter', 'DParameter', 'Strategy', # From _zstd 'ZstdCompressor', 'RichMemZstdCompressor', 'ZstdDecompressor',", "parameter return _zstd._get_param_bounds(1, self.value) class DParameter(IntEnum): \"\"\"Decompression parameters\"\"\" windowLogMax =", "_zstd._ZSTD_c_contentSizeFlag checksumFlag = _zstd._ZSTD_c_checksumFlag dictIDFlag = _zstd._ZSTD_c_dictIDFlag nbWorkers = _zstd._ZSTD_c_nbWorkers", "searchLog = _zstd._ZSTD_c_searchLog minMatch = _zstd._ZSTD_c_minMatch targetLength = _zstd._ZSTD_c_targetLength strategy", "self.value) class Strategy(IntEnum): \"\"\"Compression strategies, listed from fastest to strongest.", "# Used in __init__.py _ZSTD_DStreamInSize = _zstd._ZSTD_DStreamInSize _train_dict = _zstd._train_dict", "btopt = _zstd._ZSTD_btopt btultra = _zstd._ZSTD_btultra btultra2 = _zstd._ZSTD_btultra2 #", "'compress_stream', 'decompress_stream', 'zstd_version', 'zstd_version_info', 'zstd_support_multithread') # Used in __init__.py _ZSTD_DStreamInSize", "btultra2 = _zstd._ZSTD_btultra2 # Set CParameter/DParameter types for validity check", "namedtuple in the future.\"\"\" ret_tuple = _zstd._get_frame_info(frame_buffer) return _nt_frame_info(*ret_tuple) class", "= _zstd._ZSTD_c_ldmHashRateLog contentSizeFlag = _zstd._ZSTD_c_contentSizeFlag checksumFlag = _zstd._ZSTD_c_checksumFlag dictIDFlag =", "= _zstd._ZSTD_greedy lazy = _zstd._ZSTD_lazy lazy2 = _zstd._ZSTD_lazy2 btlazy2 =", "# 0 means decompression parameter return _zstd._get_param_bounds(0, self.value) class Strategy(IntEnum):", "in the future.\"\"\" ret_tuple = _zstd._get_frame_info(frame_buffer) return _nt_frame_info(*ret_tuple) class CParameter(IntEnum):", "fast to strong) is guaranteed. 
\"\"\" fast = _zstd._ZSTD_fast dfast", "(# From this file 'compressionLevel_values', 'get_frame_info', 'CParameter', 'DParameter', 'Strategy', #", "contentSizeFlag = _zstd._ZSTD_c_contentSizeFlag checksumFlag = _zstd._ZSTD_c_checksumFlag dictIDFlag = _zstd._ZSTD_c_dictIDFlag nbWorkers", "['decompressed_size', 'dictionary_id']) def get_frame_info(frame_buffer): \"\"\"Get zstd frame infomation from a", "size is unknown. dictionary_id is a 32-bit unsigned integer value.", "the order (from fast to strong) is guaranteed. \"\"\" fast", "_zstd._finalize_dict # compressionLevel_values _nt_values = namedtuple('values', ['default', 'min', 'max']) compressionLevel_values", "need a dictionary to be decoded, and the ID of", "ret_tuple = _zstd._get_frame_info(frame_buffer) return _nt_frame_info(*ret_tuple) class CParameter(IntEnum): \"\"\"Compression parameters\"\"\" compressionLevel", "self.value) class DParameter(IntEnum): \"\"\"Decompression parameters\"\"\" windowLogMax = _zstd._ZSTD_d_windowLogMax def bounds(self):", "ldmBucketSizeLog = _zstd._ZSTD_c_ldmBucketSizeLog ldmHashRateLog = _zstd._ZSTD_c_ldmHashRateLog contentSizeFlag = _zstd._ZSTD_c_contentSizeFlag checksumFlag", "From _zstd 'ZstdCompressor', 'RichMemZstdCompressor', 'ZstdDecompressor', 'EndlessZstdDecompressor', 'ZstdDict', 'ZstdError', 'decompress', 'get_frame_size',", "is guaranteed. 
\"\"\" fast = _zstd._ZSTD_fast dfast = _zstd._ZSTD_dfast greedy", "'min', 'max']) compressionLevel_values = _nt_values(_zstd._ZSTD_defaultCLevel, _zstd._ZSTD_minCLevel, _zstd._ZSTD_maxCLevel) _nt_frame_info = namedtuple('frame_info',", "items to the namedtuple in the future.\"\"\" ret_tuple = _zstd._get_frame_info(frame_buffer)", "ldmHashLog = _zstd._ZSTD_c_ldmHashLog ldmMinMatch = _zstd._ZSTD_c_ldmMinMatch ldmBucketSizeLog = _zstd._ZSTD_c_ldmBucketSizeLog ldmHashRateLog", "not recorded in the frame header, the frame may or", "<filename>src/c/c_pyzstd.py from collections import namedtuple from enum import IntEnum from", "\"\"\"Return lower and upper bounds of a parameter, both inclusive.\"\"\"", "_ZSTD_DStreamInSize = _zstd._ZSTD_DStreamInSize _train_dict = _zstd._train_dict _finalize_dict = _zstd._finalize_dict #", "the namedtuple in the future.\"\"\" ret_tuple = _zstd._get_frame_info(frame_buffer) return _nt_frame_info(*ret_tuple)", "targetLength = _zstd._ZSTD_c_targetLength strategy = _zstd._ZSTD_c_strategy enableLongDistanceMatching = _zstd._ZSTD_c_enableLongDistanceMatching ldmHashLog", "_zstd __all__ = (# From this file 'compressionLevel_values', 'get_frame_info', 'CParameter',", "and upper bounds of a parameter, both inclusive.\"\"\" # 0", "_zstd 'ZstdCompressor', 'RichMemZstdCompressor', 'ZstdDecompressor', 'EndlessZstdDecompressor', 'ZstdDict', 'ZstdError', 'decompress', 'get_frame_size', 'compress_stream',", "_zstd._ZSTD_lazy2 btlazy2 = _zstd._ZSTD_btlazy2 btopt = _zstd._ZSTD_btopt btultra = _zstd._ZSTD_btultra", "two-items namedtuple: (decompressed_size, dictionary_id) If decompressed_size is None, decompressed size", "zstd frame infomation from a frame header. 
Argument frame_buffer: A", "_zstd._ZSTD_c_strategy enableLongDistanceMatching = _zstd._ZSTD_c_enableLongDistanceMatching ldmHashLog = _zstd._ZSTD_c_ldmHashLog ldmMinMatch = _zstd._ZSTD_c_ldmMinMatch", "parameters\"\"\" windowLogMax = _zstd._ZSTD_d_windowLogMax def bounds(self): \"\"\"Return lower and upper", "frame infomation from a frame header. Argument frame_buffer: A bytes-like", "from ._zstd import * from . import _zstd __all__ =", "It should starts from the beginning of a frame, and", "windowLog = _zstd._ZSTD_c_windowLog hashLog = _zstd._ZSTD_c_hashLog chainLog = _zstd._ZSTD_c_chainLog searchLog", "means dictionary ID was not recorded in the frame header,", "to strong) is guaranteed. \"\"\" fast = _zstd._ZSTD_fast dfast =", "means compression parameter return _zstd._get_param_bounds(1, self.value) class DParameter(IntEnum): \"\"\"Decompression parameters\"\"\"", "= _zstd._ZSTD_c_checksumFlag dictIDFlag = _zstd._ZSTD_c_dictIDFlag nbWorkers = _zstd._ZSTD_c_nbWorkers jobSize =", "# 1 means compression parameter return _zstd._get_param_bounds(1, self.value) class DParameter(IntEnum):", "decompressed_size is None, decompressed size is unknown. dictionary_id is a", "return _zstd._get_param_bounds(1, self.value) class DParameter(IntEnum): \"\"\"Decompression parameters\"\"\" windowLogMax = _zstd._ZSTD_d_windowLogMax", "such a dictionary is not specified. 
It's possible to append", "= _zstd._ZSTD_c_overlapLog def bounds(self): \"\"\"Return lower and upper bounds of", "from enum import IntEnum from ._zstd import * from .", "a dictionary to be decoded, and the ID of such", "strategy = _zstd._ZSTD_c_strategy enableLongDistanceMatching = _zstd._ZSTD_c_enableLongDistanceMatching ldmHashLog = _zstd._ZSTD_c_ldmHashLog ldmMinMatch", "_zstd._ZSTD_d_windowLogMax def bounds(self): \"\"\"Return lower and upper bounds of a", "chainLog = _zstd._ZSTD_c_chainLog searchLog = _zstd._ZSTD_c_searchLog minMatch = _zstd._ZSTD_c_minMatch targetLength", "_zstd._get_param_bounds(1, self.value) class DParameter(IntEnum): \"\"\"Decompression parameters\"\"\" windowLogMax = _zstd._ZSTD_d_windowLogMax def", "0 means decompression parameter return _zstd._get_param_bounds(0, self.value) class Strategy(IntEnum): \"\"\"Compression", "= _zstd._ZSTD_c_compressionLevel windowLog = _zstd._ZSTD_c_windowLog hashLog = _zstd._ZSTD_c_hashLog chainLog =", "_zstd._ZSTD_c_overlapLog def bounds(self): \"\"\"Return lower and upper bounds of a", "get_frame_info(frame_buffer): \"\"\"Get zstd frame infomation from a frame header. Argument", "a frame, and needs to include at least the frame", "Note : new strategies _might_ be added in the future,", "'DParameter', 'Strategy', # From _zstd 'ZstdCompressor', 'RichMemZstdCompressor', 'ZstdDecompressor', 'EndlessZstdDecompressor', 'ZstdDict',", "= _zstd._ZSTD_c_dictIDFlag nbWorkers = _zstd._ZSTD_c_nbWorkers jobSize = _zstd._ZSTD_c_jobSize overlapLog =", "_train_dict = _zstd._train_dict _finalize_dict = _zstd._finalize_dict # compressionLevel_values _nt_values =", "_zstd._ZSTD_c_ldmHashLog ldmMinMatch = _zstd._ZSTD_c_ldmMinMatch ldmBucketSizeLog = _zstd._ZSTD_c_ldmBucketSizeLog ldmHashRateLog = _zstd._ZSTD_c_ldmHashRateLog", "should starts from the beginning of a frame, and needs", "header (6 to 18 bytes). 
Return a two-items namedtuple: (decompressed_size,", "= _zstd._ZSTD_lazy2 btlazy2 = _zstd._ZSTD_btlazy2 btopt = _zstd._ZSTD_btopt btultra =", "to 18 bytes). Return a two-items namedtuple: (decompressed_size, dictionary_id) If", "\"\"\"Get zstd frame infomation from a frame header. Argument frame_buffer:", "IntEnum from ._zstd import * from . import _zstd __all__", "namedtuple('values', ['default', 'min', 'max']) compressionLevel_values = _nt_values(_zstd._ZSTD_defaultCLevel, _zstd._ZSTD_minCLevel, _zstd._ZSTD_maxCLevel) _nt_frame_info", "object. It should starts from the beginning of a frame,", "_zstd._ZSTD_btopt btultra = _zstd._ZSTD_btultra btultra2 = _zstd._ZSTD_btultra2 # Set CParameter/DParameter", "be added in the future, only the order (from fast", "= namedtuple('frame_info', ['decompressed_size', 'dictionary_id']) def get_frame_info(frame_buffer): \"\"\"Get zstd frame infomation", "upper bounds of a parameter, both inclusive.\"\"\" # 0 means", "only the order (from fast to strong) is guaranteed. \"\"\"", "ID was not recorded in the frame header, the frame", "lazy2 = _zstd._ZSTD_lazy2 btlazy2 = _zstd._ZSTD_btlazy2 btopt = _zstd._ZSTD_btopt btultra", "_zstd._ZSTD_btultra2 # Set CParameter/DParameter types for validity check _zstd._set_parameter_types(CParameter, DParameter)", "header. Argument frame_buffer: A bytes-like object. 
It should starts from", "in the future, only the order (from fast to strong)", "_nt_frame_info = namedtuple('frame_info', ['decompressed_size', 'dictionary_id']) def get_frame_info(frame_buffer): \"\"\"Get zstd frame", "a two-items namedtuple: (decompressed_size, dictionary_id) If decompressed_size is None, decompressed", "\"\"\"Compression parameters\"\"\" compressionLevel = _zstd._ZSTD_c_compressionLevel windowLog = _zstd._ZSTD_c_windowLog hashLog =", "_zstd._ZSTD_btlazy2 btopt = _zstd._ZSTD_btopt btultra = _zstd._ZSTD_btultra btultra2 = _zstd._ZSTD_btultra2", "= _zstd._ZSTD_c_hashLog chainLog = _zstd._ZSTD_c_chainLog searchLog = _zstd._ZSTD_c_searchLog minMatch =", "parameter return _zstd._get_param_bounds(0, self.value) class Strategy(IntEnum): \"\"\"Compression strategies, listed from", "= _zstd._ZSTD_DStreamInSize _train_dict = _zstd._train_dict _finalize_dict = _zstd._finalize_dict # compressionLevel_values", "_zstd._ZSTD_c_checksumFlag dictIDFlag = _zstd._ZSTD_c_dictIDFlag nbWorkers = _zstd._ZSTD_c_nbWorkers jobSize = _zstd._ZSTD_c_jobSize", "Argument frame_buffer: A bytes-like object. It should starts from the", "possible to append more items to the namedtuple in the", "dictionary_id is a 32-bit unsigned integer value. 0 means dictionary", "'Strategy', # From _zstd 'ZstdCompressor', 'RichMemZstdCompressor', 'ZstdDecompressor', 'EndlessZstdDecompressor', 'ZstdDict', 'ZstdError',", "in the frame header, the frame may or may not", "\"\"\"Decompression parameters\"\"\" windowLogMax = _zstd._ZSTD_d_windowLogMax def bounds(self): \"\"\"Return lower and", "the frame header, the frame may or may not need", "import * from . 
import _zstd __all__ = (# From", "of a parameter, both inclusive.\"\"\" # 1 means compression parameter", "overlapLog = _zstd._ZSTD_c_overlapLog def bounds(self): \"\"\"Return lower and upper bounds", "'decompress_stream', 'zstd_version', 'zstd_version_info', 'zstd_support_multithread') # Used in __init__.py _ZSTD_DStreamInSize =", "From this file 'compressionLevel_values', 'get_frame_info', 'CParameter', 'DParameter', 'Strategy', # From", "_zstd._ZSTD_DStreamInSize _train_dict = _zstd._train_dict _finalize_dict = _zstd._finalize_dict # compressionLevel_values _nt_values", "class DParameter(IntEnum): \"\"\"Decompression parameters\"\"\" windowLogMax = _zstd._ZSTD_d_windowLogMax def bounds(self): \"\"\"Return", "strategies, listed from fastest to strongest. Note : new strategies", "class Strategy(IntEnum): \"\"\"Compression strategies, listed from fastest to strongest. Note", "'dictionary_id']) def get_frame_info(frame_buffer): \"\"\"Get zstd frame infomation from a frame", "is unknown. dictionary_id is a 32-bit unsigned integer value. 0", "['default', 'min', 'max']) compressionLevel_values = _nt_values(_zstd._ZSTD_defaultCLevel, _zstd._ZSTD_minCLevel, _zstd._ZSTD_maxCLevel) _nt_frame_info =", "._zstd import * from . import _zstd __all__ = (#", "inclusive.\"\"\" # 1 means compression parameter return _zstd._get_param_bounds(1, self.value) class", "new strategies _might_ be added in the future, only the", "(decompressed_size, dictionary_id) If decompressed_size is None, decompressed size is unknown.", "bytes). 
Return a two-items namedtuple: (decompressed_size, dictionary_id) If decompressed_size is", "hashLog = _zstd._ZSTD_c_hashLog chainLog = _zstd._ZSTD_c_chainLog searchLog = _zstd._ZSTD_c_searchLog minMatch", "= _zstd._ZSTD_c_searchLog minMatch = _zstd._ZSTD_c_minMatch targetLength = _zstd._ZSTD_c_targetLength strategy =", "= _zstd._ZSTD_btlazy2 btopt = _zstd._ZSTD_btopt btultra = _zstd._ZSTD_btultra btultra2 =", "\"\"\"Compression strategies, listed from fastest to strongest. Note : new", "of a parameter, both inclusive.\"\"\" # 0 means decompression parameter", "added in the future, only the order (from fast to", "'zstd_version', 'zstd_version_info', 'zstd_support_multithread') # Used in __init__.py _ZSTD_DStreamInSize = _zstd._ZSTD_DStreamInSize", "= _nt_values(_zstd._ZSTD_defaultCLevel, _zstd._ZSTD_minCLevel, _zstd._ZSTD_maxCLevel) _nt_frame_info = namedtuple('frame_info', ['decompressed_size', 'dictionary_id']) def", "unsigned integer value. 0 means dictionary ID was not recorded", "'compressionLevel_values', 'get_frame_info', 'CParameter', 'DParameter', 'Strategy', # From _zstd 'ZstdCompressor', 'RichMemZstdCompressor',", "to the namedtuple in the future.\"\"\" ret_tuple = _zstd._get_frame_info(frame_buffer) return", "\"\"\" fast = _zstd._ZSTD_fast dfast = _zstd._ZSTD_dfast greedy = _zstd._ZSTD_greedy", "_might_ be added in the future, only the order (from", "dictionary to be decoded, and the ID of such a", "dictIDFlag = _zstd._ZSTD_c_dictIDFlag nbWorkers = _zstd._ZSTD_c_nbWorkers jobSize = _zstd._ZSTD_c_jobSize overlapLog", "ID of such a dictionary is not specified. It's possible", "dictionary_id) If decompressed_size is None, decompressed size is unknown. dictionary_id", "lower and upper bounds of a parameter, both inclusive.\"\"\" #", "the ID of such a dictionary is not specified. 
It's", "'zstd_support_multithread') # Used in __init__.py _ZSTD_DStreamInSize = _zstd._ZSTD_DStreamInSize _train_dict =", "= _zstd._ZSTD_c_contentSizeFlag checksumFlag = _zstd._ZSTD_c_checksumFlag dictIDFlag = _zstd._ZSTD_c_dictIDFlag nbWorkers =", "parameter, both inclusive.\"\"\" # 1 means compression parameter return _zstd._get_param_bounds(1,", "at least the frame header (6 to 18 bytes). Return", "listed from fastest to strongest. Note : new strategies _might_", "is not specified. It's possible to append more items to", "= _zstd._ZSTD_fast dfast = _zstd._ZSTD_dfast greedy = _zstd._ZSTD_greedy lazy =", "the future.\"\"\" ret_tuple = _zstd._get_frame_info(frame_buffer) return _nt_frame_info(*ret_tuple) class CParameter(IntEnum): \"\"\"Compression", "__all__ = (# From this file 'compressionLevel_values', 'get_frame_info', 'CParameter', 'DParameter',", "'ZstdCompressor', 'RichMemZstdCompressor', 'ZstdDecompressor', 'EndlessZstdDecompressor', 'ZstdDict', 'ZstdError', 'decompress', 'get_frame_size', 'compress_stream', 'decompress_stream',", "both inclusive.\"\"\" # 1 means compression parameter return _zstd._get_param_bounds(1, self.value)", "= _zstd._get_frame_info(frame_buffer) return _nt_frame_info(*ret_tuple) class CParameter(IntEnum): \"\"\"Compression parameters\"\"\" compressionLevel =", "bounds(self): \"\"\"Return lower and upper bounds of a parameter, both", "import namedtuple from enum import IntEnum from ._zstd import *", "_zstd._ZSTD_c_compressionLevel windowLog = _zstd._ZSTD_c_windowLog hashLog = _zstd._ZSTD_c_hashLog chainLog = _zstd._ZSTD_c_chainLog", "ldmHashRateLog = _zstd._ZSTD_c_ldmHashRateLog contentSizeFlag = _zstd._ZSTD_c_contentSizeFlag checksumFlag = _zstd._ZSTD_c_checksumFlag dictIDFlag", "strong) is guaranteed. \"\"\" fast = _zstd._ZSTD_fast dfast = _zstd._ZSTD_dfast", "header, the frame may or may not need a dictionary", "decoded, and the ID of such a dictionary is not", "strategies _might_ be added in the future, only the order", "* from . 
import _zstd __all__ = (# From this", "future, only the order (from fast to strong) is guaranteed.", "include at least the frame header (6 to 18 bytes).", "= _zstd._ZSTD_c_ldmBucketSizeLog ldmHashRateLog = _zstd._ZSTD_c_ldmHashRateLog contentSizeFlag = _zstd._ZSTD_c_contentSizeFlag checksumFlag =", "namedtuple from enum import IntEnum from ._zstd import * from", "'decompress', 'get_frame_size', 'compress_stream', 'decompress_stream', 'zstd_version', 'zstd_version_info', 'zstd_support_multithread') # Used in", "infomation from a frame header. Argument frame_buffer: A bytes-like object.", "the frame header (6 to 18 bytes). Return a two-items", "_nt_frame_info(*ret_tuple) class CParameter(IntEnum): \"\"\"Compression parameters\"\"\" compressionLevel = _zstd._ZSTD_c_compressionLevel windowLog =", "= _zstd._ZSTD_btultra btultra2 = _zstd._ZSTD_btultra2 # Set CParameter/DParameter types for", "_zstd._ZSTD_c_ldmBucketSizeLog ldmHashRateLog = _zstd._ZSTD_c_ldmHashRateLog contentSizeFlag = _zstd._ZSTD_c_contentSizeFlag checksumFlag = _zstd._ZSTD_c_checksumFlag", "not specified. It's possible to append more items to the", ". 
import _zstd __all__ = (# From this file 'compressionLevel_values',", "'EndlessZstdDecompressor', 'ZstdDict', 'ZstdError', 'decompress', 'get_frame_size', 'compress_stream', 'decompress_stream', 'zstd_version', 'zstd_version_info', 'zstd_support_multithread')", "import _zstd __all__ = (# From this file 'compressionLevel_values', 'get_frame_info',", "_zstd._ZSTD_maxCLevel) _nt_frame_info = namedtuple('frame_info', ['decompressed_size', 'dictionary_id']) def get_frame_info(frame_buffer): \"\"\"Get zstd", "parameter, both inclusive.\"\"\" # 0 means decompression parameter return _zstd._get_param_bounds(0,", "both inclusive.\"\"\" # 0 means decompression parameter return _zstd._get_param_bounds(0, self.value)", "lazy = _zstd._ZSTD_lazy lazy2 = _zstd._ZSTD_lazy2 btlazy2 = _zstd._ZSTD_btlazy2 btopt", "'CParameter', 'DParameter', 'Strategy', # From _zstd 'ZstdCompressor', 'RichMemZstdCompressor', 'ZstdDecompressor', 'EndlessZstdDecompressor',", "guaranteed. \"\"\" fast = _zstd._ZSTD_fast dfast = _zstd._ZSTD_dfast greedy =", "0 means dictionary ID was not recorded in the frame", "not need a dictionary to be decoded, and the ID", "the future, only the order (from fast to strong) is", "_zstd._ZSTD_c_targetLength strategy = _zstd._ZSTD_c_strategy enableLongDistanceMatching = _zstd._ZSTD_c_enableLongDistanceMatching ldmHashLog = _zstd._ZSTD_c_ldmHashLog", "= (# From this file 'compressionLevel_values', 'get_frame_info', 'CParameter', 'DParameter', 'Strategy',", "= _zstd._ZSTD_c_strategy enableLongDistanceMatching = _zstd._ZSTD_c_enableLongDistanceMatching ldmHashLog = _zstd._ZSTD_c_ldmHashLog ldmMinMatch =", "Strategy(IntEnum): \"\"\"Compression strategies, listed from fastest to strongest. Note :", "the beginning of a frame, and needs to include at", "recorded in the frame header, the frame may or may", "decompressed size is unknown. 
dictionary_id is a 32-bit unsigned integer", "= namedtuple('values', ['default', 'min', 'max']) compressionLevel_values = _nt_values(_zstd._ZSTD_defaultCLevel, _zstd._ZSTD_minCLevel, _zstd._ZSTD_maxCLevel)", "# compressionLevel_values _nt_values = namedtuple('values', ['default', 'min', 'max']) compressionLevel_values =", "'ZstdDict', 'ZstdError', 'decompress', 'get_frame_size', 'compress_stream', 'decompress_stream', 'zstd_version', 'zstd_version_info', 'zstd_support_multithread') #", "frame may or may not need a dictionary to be", "_zstd._ZSTD_c_ldmHashRateLog contentSizeFlag = _zstd._ZSTD_c_contentSizeFlag checksumFlag = _zstd._ZSTD_c_checksumFlag dictIDFlag = _zstd._ZSTD_c_dictIDFlag", "return _nt_frame_info(*ret_tuple) class CParameter(IntEnum): \"\"\"Compression parameters\"\"\" compressionLevel = _zstd._ZSTD_c_compressionLevel windowLog", "parameters\"\"\" compressionLevel = _zstd._ZSTD_c_compressionLevel windowLog = _zstd._ZSTD_c_windowLog hashLog = _zstd._ZSTD_c_hashLog", "It's possible to append more items to the namedtuple in", "enum import IntEnum from ._zstd import * from . import", "beginning of a frame, and needs to include at least", "the frame may or may not need a dictionary to", "and needs to include at least the frame header (6", "nbWorkers = _zstd._ZSTD_c_nbWorkers jobSize = _zstd._ZSTD_c_jobSize overlapLog = _zstd._ZSTD_c_overlapLog def", "= _zstd._ZSTD_btopt btultra = _zstd._ZSTD_btultra btultra2 = _zstd._ZSTD_btultra2 # Set", "is a 32-bit unsigned integer value. 0 means dictionary ID", "namedtuple: (decompressed_size, dictionary_id) If decompressed_size is None, decompressed size is", "of such a dictionary is not specified. It's possible to", "and upper bounds of a parameter, both inclusive.\"\"\" # 1", "to strongest. 
Note : new strategies _might_ be added in", "_zstd._ZSTD_greedy lazy = _zstd._ZSTD_lazy lazy2 = _zstd._ZSTD_lazy2 btlazy2 = _zstd._ZSTD_btlazy2", "'RichMemZstdCompressor', 'ZstdDecompressor', 'EndlessZstdDecompressor', 'ZstdDict', 'ZstdError', 'decompress', 'get_frame_size', 'compress_stream', 'decompress_stream', 'zstd_version',", "from a frame header. Argument frame_buffer: A bytes-like object. It", "_zstd._train_dict _finalize_dict = _zstd._finalize_dict # compressionLevel_values _nt_values = namedtuple('values', ['default',", "to be decoded, and the ID of such a dictionary", "18 bytes). Return a two-items namedtuple: (decompressed_size, dictionary_id) If decompressed_size", "value. 0 means dictionary ID was not recorded in the", "was not recorded in the frame header, the frame may", "= _zstd._ZSTD_c_targetLength strategy = _zstd._ZSTD_c_strategy enableLongDistanceMatching = _zstd._ZSTD_c_enableLongDistanceMatching ldmHashLog =", "= _zstd._ZSTD_lazy lazy2 = _zstd._ZSTD_lazy2 btlazy2 = _zstd._ZSTD_btlazy2 btopt =", "a dictionary is not specified. It's possible to append more", "compressionLevel = _zstd._ZSTD_c_compressionLevel windowLog = _zstd._ZSTD_c_windowLog hashLog = _zstd._ZSTD_c_hashLog chainLog", "more items to the namedtuple in the future.\"\"\" ret_tuple =", "a 32-bit unsigned integer value. 
0 means dictionary ID was", "_zstd._ZSTD_c_chainLog searchLog = _zstd._ZSTD_c_searchLog minMatch = _zstd._ZSTD_c_minMatch targetLength = _zstd._ZSTD_c_targetLength", "class CParameter(IntEnum): \"\"\"Compression parameters\"\"\" compressionLevel = _zstd._ZSTD_c_compressionLevel windowLog = _zstd._ZSTD_c_windowLog", "minMatch = _zstd._ZSTD_c_minMatch targetLength = _zstd._ZSTD_c_targetLength strategy = _zstd._ZSTD_c_strategy enableLongDistanceMatching", "return _zstd._get_param_bounds(0, self.value) class Strategy(IntEnum): \"\"\"Compression strategies, listed from fastest", "def bounds(self): \"\"\"Return lower and upper bounds of a parameter,", "from collections import namedtuple from enum import IntEnum from ._zstd", "a parameter, both inclusive.\"\"\" # 1 means compression parameter return", "is None, decompressed size is unknown. dictionary_id is a 32-bit", "dfast = _zstd._ZSTD_dfast greedy = _zstd._ZSTD_greedy lazy = _zstd._ZSTD_lazy lazy2", "None, decompressed size is unknown. dictionary_id is a 32-bit unsigned", "'ZstdDecompressor', 'EndlessZstdDecompressor', 'ZstdDict', 'ZstdError', 'decompress', 'get_frame_size', 'compress_stream', 'decompress_stream', 'zstd_version', 'zstd_version_info',", "_zstd._ZSTD_dfast greedy = _zstd._ZSTD_greedy lazy = _zstd._ZSTD_lazy lazy2 = _zstd._ZSTD_lazy2", "compressionLevel_values _nt_values = namedtuple('values', ['default', 'min', 'max']) compressionLevel_values = _nt_values(_zstd._ZSTD_defaultCLevel,", "greedy = _zstd._ZSTD_greedy lazy = _zstd._ZSTD_lazy lazy2 = _zstd._ZSTD_lazy2 btlazy2", "_zstd._ZSTD_c_searchLog minMatch = _zstd._ZSTD_c_minMatch targetLength = _zstd._ZSTD_c_targetLength strategy = _zstd._ZSTD_c_strategy", "_nt_values = namedtuple('values', ['default', 'min', 'max']) compressionLevel_values = _nt_values(_zstd._ZSTD_defaultCLevel, _zstd._ZSTD_minCLevel,", "= _zstd._train_dict _finalize_dict = _zstd._finalize_dict # compressionLevel_values _nt_values = namedtuple('values',", "unknown. 
dictionary_id is a 32-bit unsigned integer value. 0 means", "this file 'compressionLevel_values', 'get_frame_info', 'CParameter', 'DParameter', 'Strategy', # From _zstd", "# From _zstd 'ZstdCompressor', 'RichMemZstdCompressor', 'ZstdDecompressor', 'EndlessZstdDecompressor', 'ZstdDict', 'ZstdError', 'decompress',", "Return a two-items namedtuple: (decompressed_size, dictionary_id) If decompressed_size is None,", "= _zstd._ZSTD_c_enableLongDistanceMatching ldmHashLog = _zstd._ZSTD_c_ldmHashLog ldmMinMatch = _zstd._ZSTD_c_ldmMinMatch ldmBucketSizeLog =", "_zstd._ZSTD_fast dfast = _zstd._ZSTD_dfast greedy = _zstd._ZSTD_greedy lazy = _zstd._ZSTD_lazy", "from . import _zstd __all__ = (# From this file", "append more items to the namedtuple in the future.\"\"\" ret_tuple", "future.\"\"\" ret_tuple = _zstd._get_frame_info(frame_buffer) return _nt_frame_info(*ret_tuple) class CParameter(IntEnum): \"\"\"Compression parameters\"\"\"", "_zstd._ZSTD_lazy lazy2 = _zstd._ZSTD_lazy2 btlazy2 = _zstd._ZSTD_btlazy2 btopt = _zstd._ZSTD_btopt", "may or may not need a dictionary to be decoded,", "= _zstd._ZSTD_btultra2 # Set CParameter/DParameter types for validity check _zstd._set_parameter_types(CParameter,", "btlazy2 = _zstd._ZSTD_btlazy2 btopt = _zstd._ZSTD_btopt btultra = _zstd._ZSTD_btultra btultra2", "'get_frame_size', 'compress_stream', 'decompress_stream', 'zstd_version', 'zstd_version_info', 'zstd_support_multithread') # Used in __init__.py", "DParameter(IntEnum): \"\"\"Decompression parameters\"\"\" windowLogMax = _zstd._ZSTD_d_windowLogMax def bounds(self): \"\"\"Return lower", "(from fast to strong) is guaranteed. \"\"\" fast = _zstd._ZSTD_fast", "= _zstd._ZSTD_c_nbWorkers jobSize = _zstd._ZSTD_c_jobSize overlapLog = _zstd._ZSTD_c_overlapLog def bounds(self):", "and the ID of such a dictionary is not specified.", "specified. 
It's possible to append more items to the namedtuple", "frame, and needs to include at least the frame header", "def get_frame_info(frame_buffer): \"\"\"Get zstd frame infomation from a frame header.", "_zstd._ZSTD_btultra btultra2 = _zstd._ZSTD_btultra2 # Set CParameter/DParameter types for validity", "namedtuple('frame_info', ['decompressed_size', 'dictionary_id']) def get_frame_info(frame_buffer): \"\"\"Get zstd frame infomation from", "starts from the beginning of a frame, and needs to", "dictionary is not specified. It's possible to append more items", "'ZstdError', 'decompress', 'get_frame_size', 'compress_stream', 'decompress_stream', 'zstd_version', 'zstd_version_info', 'zstd_support_multithread') # Used", "to append more items to the namedtuple in the future.\"\"\"", "'zstd_version_info', 'zstd_support_multithread') # Used in __init__.py _ZSTD_DStreamInSize = _zstd._ZSTD_DStreamInSize _train_dict", "bytes-like object. It should starts from the beginning of a", "from fastest to strongest. 
Note : new strategies _might_ be", "in __init__.py _ZSTD_DStreamInSize = _zstd._ZSTD_DStreamInSize _train_dict = _zstd._train_dict _finalize_dict =", "_zstd._get_frame_info(frame_buffer) return _nt_frame_info(*ret_tuple) class CParameter(IntEnum): \"\"\"Compression parameters\"\"\" compressionLevel = _zstd._ZSTD_c_compressionLevel", "ldmMinMatch = _zstd._ZSTD_c_ldmMinMatch ldmBucketSizeLog = _zstd._ZSTD_c_ldmBucketSizeLog ldmHashRateLog = _zstd._ZSTD_c_ldmHashRateLog contentSizeFlag", "enableLongDistanceMatching = _zstd._ZSTD_c_enableLongDistanceMatching ldmHashLog = _zstd._ZSTD_c_ldmHashLog ldmMinMatch = _zstd._ZSTD_c_ldmMinMatch ldmBucketSizeLog", "compression parameter return _zstd._get_param_bounds(1, self.value) class DParameter(IntEnum): \"\"\"Decompression parameters\"\"\" windowLogMax", "'max']) compressionLevel_values = _nt_values(_zstd._ZSTD_defaultCLevel, _zstd._ZSTD_minCLevel, _zstd._ZSTD_maxCLevel) _nt_frame_info = namedtuple('frame_info', ['decompressed_size',", "checksumFlag = _zstd._ZSTD_c_checksumFlag dictIDFlag = _zstd._ZSTD_c_dictIDFlag nbWorkers = _zstd._ZSTD_c_nbWorkers jobSize", "collections import namedtuple from enum import IntEnum from ._zstd import", "import IntEnum from ._zstd import * from . 
import _zstd", "_zstd._get_param_bounds(0, self.value) class Strategy(IntEnum): \"\"\"Compression strategies, listed from fastest to", "of a frame, and needs to include at least the", ": new strategies _might_ be added in the future, only", "1 means compression parameter return _zstd._get_param_bounds(1, self.value) class DParameter(IntEnum): \"\"\"Decompression", "= _zstd._ZSTD_c_ldmMinMatch ldmBucketSizeLog = _zstd._ZSTD_c_ldmBucketSizeLog ldmHashRateLog = _zstd._ZSTD_c_ldmHashRateLog contentSizeFlag =", "btultra = _zstd._ZSTD_btultra btultra2 = _zstd._ZSTD_btultra2 # Set CParameter/DParameter types", "= _zstd._ZSTD_c_windowLog hashLog = _zstd._ZSTD_c_hashLog chainLog = _zstd._ZSTD_c_chainLog searchLog =", "upper bounds of a parameter, both inclusive.\"\"\" # 1 means", "strongest. Note : new strategies _might_ be added in the", "_zstd._ZSTD_minCLevel, _zstd._ZSTD_maxCLevel) _nt_frame_info = namedtuple('frame_info', ['decompressed_size', 'dictionary_id']) def get_frame_info(frame_buffer): \"\"\"Get", "to include at least the frame header (6 to 18", "dictionary ID was not recorded in the frame header, the", "be decoded, and the ID of such a dictionary is", "frame header, the frame may or may not need a", "bounds of a parameter, both inclusive.\"\"\" # 1 means compression", "frame header. Argument frame_buffer: A bytes-like object. 
It should starts", "from the beginning of a frame, and needs to include", "_zstd._ZSTD_c_hashLog chainLog = _zstd._ZSTD_c_chainLog searchLog = _zstd._ZSTD_c_searchLog minMatch = _zstd._ZSTD_c_minMatch", "compressionLevel_values = _nt_values(_zstd._ZSTD_defaultCLevel, _zstd._ZSTD_minCLevel, _zstd._ZSTD_maxCLevel) _nt_frame_info = namedtuple('frame_info', ['decompressed_size', 'dictionary_id'])", "CParameter(IntEnum): \"\"\"Compression parameters\"\"\" compressionLevel = _zstd._ZSTD_c_compressionLevel windowLog = _zstd._ZSTD_c_windowLog hashLog", "fast = _zstd._ZSTD_fast dfast = _zstd._ZSTD_dfast greedy = _zstd._ZSTD_greedy lazy", "bounds of a parameter, both inclusive.\"\"\" # 0 means decompression", "decompression parameter return _zstd._get_param_bounds(0, self.value) class Strategy(IntEnum): \"\"\"Compression strategies, listed", "least the frame header (6 to 18 bytes). Return a", "frame_buffer: A bytes-like object. It should starts from the beginning", "or may not need a dictionary to be decoded, and", "jobSize = _zstd._ZSTD_c_jobSize overlapLog = _zstd._ZSTD_c_overlapLog def bounds(self): \"\"\"Return lower", "_zstd._ZSTD_c_nbWorkers jobSize = _zstd._ZSTD_c_jobSize overlapLog = _zstd._ZSTD_c_overlapLog def bounds(self): \"\"\"Return", "(6 to 18 bytes). Return a two-items namedtuple: (decompressed_size, dictionary_id)", "may not need a dictionary to be decoded, and the", "a frame header. Argument frame_buffer: A bytes-like object. It should", "fastest to strongest. 
Note : new strategies _might_ be added", "_zstd._ZSTD_c_jobSize overlapLog = _zstd._ZSTD_c_overlapLog def bounds(self): \"\"\"Return lower and upper", "means decompression parameter return _zstd._get_param_bounds(0, self.value) class Strategy(IntEnum): \"\"\"Compression strategies,", "file 'compressionLevel_values', 'get_frame_info', 'CParameter', 'DParameter', 'Strategy', # From _zstd 'ZstdCompressor',", "_zstd._ZSTD_c_enableLongDistanceMatching ldmHashLog = _zstd._ZSTD_c_ldmHashLog ldmMinMatch = _zstd._ZSTD_c_ldmMinMatch ldmBucketSizeLog = _zstd._ZSTD_c_ldmBucketSizeLog", "needs to include at least the frame header (6 to", "= _zstd._ZSTD_d_windowLogMax def bounds(self): \"\"\"Return lower and upper bounds of", "_zstd._ZSTD_c_minMatch targetLength = _zstd._ZSTD_c_targetLength strategy = _zstd._ZSTD_c_strategy enableLongDistanceMatching = _zstd._ZSTD_c_enableLongDistanceMatching", "= _zstd._finalize_dict # compressionLevel_values _nt_values = namedtuple('values', ['default', 'min', 'max'])", "= _zstd._ZSTD_dfast greedy = _zstd._ZSTD_greedy lazy = _zstd._ZSTD_lazy lazy2 =", "_nt_values(_zstd._ZSTD_defaultCLevel, _zstd._ZSTD_minCLevel, _zstd._ZSTD_maxCLevel) _nt_frame_info = namedtuple('frame_info', ['decompressed_size', 'dictionary_id']) def get_frame_info(frame_buffer):", "frame header (6 to 18 bytes). Return a two-items namedtuple:", "32-bit unsigned integer value. 
0 means dictionary ID was not", "= _zstd._ZSTD_c_minMatch targetLength = _zstd._ZSTD_c_targetLength strategy = _zstd._ZSTD_c_strategy enableLongDistanceMatching =", "= _zstd._ZSTD_c_jobSize overlapLog = _zstd._ZSTD_c_overlapLog def bounds(self): \"\"\"Return lower and", "= _zstd._ZSTD_c_chainLog searchLog = _zstd._ZSTD_c_searchLog minMatch = _zstd._ZSTD_c_minMatch targetLength =", "__init__.py _ZSTD_DStreamInSize = _zstd._ZSTD_DStreamInSize _train_dict = _zstd._train_dict _finalize_dict = _zstd._finalize_dict", "a parameter, both inclusive.\"\"\" # 0 means decompression parameter return", "inclusive.\"\"\" # 0 means decompression parameter return _zstd._get_param_bounds(0, self.value) class", "If decompressed_size is None, decompressed size is unknown. dictionary_id is", "_zstd._ZSTD_c_ldmMinMatch ldmBucketSizeLog = _zstd._ZSTD_c_ldmBucketSizeLog ldmHashRateLog = _zstd._ZSTD_c_ldmHashRateLog contentSizeFlag = _zstd._ZSTD_c_contentSizeFlag", "windowLogMax = _zstd._ZSTD_d_windowLogMax def bounds(self): \"\"\"Return lower and upper bounds", "_zstd._ZSTD_c_windowLog hashLog = _zstd._ZSTD_c_hashLog chainLog = _zstd._ZSTD_c_chainLog searchLog = _zstd._ZSTD_c_searchLog", "order (from fast to strong) is guaranteed. \"\"\" fast =", "Used in __init__.py _ZSTD_DStreamInSize = _zstd._ZSTD_DStreamInSize _train_dict = _zstd._train_dict _finalize_dict", "_finalize_dict = _zstd._finalize_dict # compressionLevel_values _nt_values = namedtuple('values', ['default', 'min',", "integer value. 0 means dictionary ID was not recorded in", "= _zstd._ZSTD_c_ldmHashLog ldmMinMatch = _zstd._ZSTD_c_ldmMinMatch ldmBucketSizeLog = _zstd._ZSTD_c_ldmBucketSizeLog ldmHashRateLog =", "A bytes-like object. It should starts from the beginning of" ]
class AbstractBaseTest(ABC):
    @pytest.fixture
    def cls_(self):
        """
        Return the model class under test.

        Assumption: if the class under test is Foo, then the class grouping
        the tests is a subclass of BaseTest named TestFoo.
        """
        # Strip the "Test" prefix from the test-class name to recover the
        # model class name, then look it up on the model namespace.
        model_name = self.__class__.__name__[len("Test"):]
        return getattr(self.get_model(), model_name)

    @abstractmethod
    def get_model(self):
        """Return the namespace (module/object) that holds the model classes."""
        pass
\"\"\" if len(collection) != len(objects): # False if lengths", "explicitly: a model can be iterable) if not isinstance(objects, list):", "def _get_default_where_clause(cls, obj_id): where_clause = cls.__table__.c.id == obj_id return where_clause", "return_id) yield obj_id finally: table = obj.__table__ if where_clause is", "collection.sort(key=lambda item: item.id) objects_l = list(objects) objects_l.sort(key=lambda item: item.id) for", "obj, where_clause): yield obj @contextmanager def dbcleanup(session, obj, where_clause=None): \"\"\"", "is None or item2.id is None or item1.id != item2.id:", "of a context manager is impractical. (Assume all objects have", "else None # save this before obj is expunged session.expunge(obj)", "import pytest from sqlalchemy import ( delete, select, UniqueConstraint, )", "class under test is Foo, then the class grouping the", "self.__class__.__name__[prefix:] return getattr(self.get_model(), class_name) @abstractmethod def get_model(self): pass def dbcleanup_wrapper(session,", "uuid import uuid4 import pytest from sqlalchemy import ( delete,", "obj.__table__ stmt = delete(table).where(table.c.id == obj.id) session.execute(stmt) def get_stored_obj(session, cls,", "index in table.indexes: col_names = {c.name for c in index.columns}", "obj, return_id=True): \"\"\" Use the session to store obj in", "store obj in database; delete from database on exit, bypassing", "session, so that on a subsequent load from the database", "== col_names: return True def has_index(table, fields): for index in", "its primary key. 
@contextmanager
def dbcleanup(session, obj, where_clause=None):
    """
    Use the session to store obj in the database, then remove obj from the
    session so a subsequent load yields a clean instance; on exit delete the
    row, bypassing the session.

    If where_clause is None, the row to delete is matched by the primary key
    id captured by persist().
    """
    want_id = where_clause is None
    try:
        stored_id = persist(session, obj, want_id)
        yield stored_id
    finally:
        criteria = where_clause
        if criteria is None:
            # Fall back to matching on the primary key captured above.
            criteria = _get_default_where_clause(type(obj), stored_id)
        session.execute(delete(obj.__table__).where(criteria))
def persist(session, obj, return_id=True):
    """
    Use the session to store obj in the database, then expunge obj from the
    session so that a subsequent load returns a clean instance.

    Returns obj's primary key id (captured before expunging) when return_id
    is True, otherwise None.
    """
    session.add(obj)
    session.flush()
    # Capture the id while obj is still attached to the session.
    saved_id = obj.id if return_id else None
    session.expunge(obj)
    return saved_id
def delete_from_database(session, objects):
    """
    Delete each object in objects from the database, bypassing the session.

    May be called at the end of a test when using a context manager is
    impractical.  (Assumes every object has ``id`` as its primary key.)
    """
    # A single model instance may itself be iterable, so test for list
    # explicitly instead of duck-typing.
    if not isinstance(objects, list):
        objects = [objects]

    for target in objects:
        table = target.__table__
        session.execute(delete(table).where(table.c.id == target.id))
def get_stored_obj(session, cls, obj_id=None, where_clause=None, unique=False):
    """
    Load exactly one stored instance of cls, matched either by primary key
    obj_id or by an explicit SQLAlchemy where_clause (exactly one of the two
    must be provided).

    Set unique=True when the result may contain joined eager loads against
    collections, which SQLAlchemy requires to be de-duplicated.
    """
    # Either obj_id or where_clause must be provided, but not both.
    # Compare against None (not truthiness) so a valid falsy id such as 0
    # is accepted instead of tripping the assertion.
    assert (obj_id is not None) ^ (where_clause is not None)
    if where_clause is None:
        where_clause = _get_default_where_clause(cls, obj_id)
    stmt = select(cls).where(where_clause)
    result = session.execute(stmt)
    # unique() is required if result contains joint eager loads against collections
    # https://gerrit.sqlalchemy.org/c/sqlalchemy/sqlalchemy/+/2253
    if unique:
        result = result.unique()
    return result.scalar_one()
(Assume all objects", "where_clause = _get_default_where_clause(cls, obj_id) stmt = select(cls).where(where_clause) result = session.execute(stmt)", "obj_id) stmt = delete(table).where(where_clause) session.execute(stmt) def persist(session, obj, return_id=True): \"\"\"", "True def has_index(table, fields): for index in table.indexes: col_names =", "def collection_consists_of_objects(collection, *objects): \"\"\" Returns True iff list(collection) == list(objects),", "in database; delete from database on exit, bypassing the session.", "if not collection: # True if both are empty return", "be iterable) if not isinstance(objects, list): objects = [objects] for", "uuid4 import pytest from sqlalchemy import ( delete, select, UniqueConstraint,", "obj_id) stmt = select(cls).where(where_clause) result = session.execute(stmt) # unique() is", "= result.unique() return result.scalar_one() def has_unique_constraint(table, fields): for constraint in", "delete(table).where(where_clause) session.execute(stmt) def persist(session, obj, return_id=True): \"\"\" Use the session", "@pytest.fixture def cls_(self): \"\"\" Return class under test. Assumptions: if", "collection: # True if both are empty return True #", "!= item2.id: return False return True def get_unique_value(): \"\"\"Generate unique", "= session.execute(stmt) # unique() is required if result contains joint", "in zip(collection, objects_l): if item1.id is None or item2.id is", "if set(fields) == col_names: return True def collection_consists_of_objects(collection, *objects): \"\"\"", "import uuid4 import pytest from sqlalchemy import ( delete, select,", "the database we get a clean instance. 
\"\"\" session.add(obj) session.flush()", "= obj.__table__ stmt = delete(table).where(table.c.id == obj.id) session.execute(stmt) def get_stored_obj(session,", "collections # https://gerrit.sqlalchemy.org/c/sqlalchemy/sqlalchemy/+/2253 if unique: result = result.unique() return result.scalar_one()", "have a list of objects (check for list explicitly: a", "member by its 'id' attribute, which must be its primary", "SQLAlchemy WHERE clause should be provided to construct a custom", "their primary key.) \"\"\" # Ensure we have a list", "session.execute(stmt) def get_stored_obj(session, cls, obj_id=None, where_clause=None, unique=False): # Either obj_id", "both are empty return True # Sort, then compare each", "item.id) objects_l = list(objects) objects_l.sort(key=lambda item: item.id) for item1, item2", "item: item.id) objects_l = list(objects) objects_l.sort(key=lambda item: item.id) for item1,", "stmt = select(cls).where(where_clause) result = session.execute(stmt) # unique() is required", "load from the database we get a clean instance. \"\"\"", "required if result contains joint eager loads against collections #", "impractical. 
def collection_consists_of_objects(collection, *objects):
    """
    Return True iff list(collection) == list(objects), where object equality
    is determined by primary key equality: object1.id == object2.id.

    (Assumes all objects have the id field as their primary key.)
    """
    if len(collection) != len(objects):  # False if lengths are different
        return False
    if not collection:  # True if both are empty
        return True

    # Sort copies, then compare member-wise by 'id'.  Using sorted() instead
    # of list.sort() avoids mutating the caller's collection and also accepts
    # any sized iterable, not just lists.
    sorted_collection = sorted(collection, key=lambda item: item.id)
    sorted_objects = sorted(objects, key=lambda item: item.id)
    for item1, item2 in zip(sorted_collection, sorted_objects):
        if item1.id is None or item2.id is None or item1.id != item2.id:
            return False
    return True
\"\"\" return_id = where_clause is None try:", "is None try: obj_id = persist(session, obj, return_id) yield obj_id", "None or item1.id != item2.id: return False return True def", "both assert bool(obj_id) ^ (where_clause is not None) if where_clause", "col_names: return True def has_index(table, fields): for index in table.indexes:", "isinstance(objects, list): objects = [objects] for obj in objects: table", "session.expunge(obj) return obj_id def delete_from_database(session, objects): \"\"\" Delete each object", "return True def get_unique_value(): \"\"\"Generate unique values to accommodate unique", "from abc import ABC, abstractmethod from contextlib import contextmanager from", "tests should be a subclass of BaseTest, named TestFoo. \"\"\"", "yield obj @contextmanager def dbcleanup(session, obj, where_clause=None): \"\"\" Use the", "where_clause must be provided, but not both assert bool(obj_id) ^", "for obj in objects: table = obj.__table__ stmt = delete(table).where(table.c.id", "not None) if where_clause is None: where_clause = _get_default_where_clause(cls, obj_id)", "is Foo, then the class grouping the tests should be", "object equality is determined by primary key equality: object1.id ==", "this before obj is expunged session.expunge(obj) return obj_id def delete_from_database(session,", "dbcleanup(session, obj, where_clause): yield obj @contextmanager def dbcleanup(session, obj, where_clause=None):", "a model can be iterable) if not isinstance(objects, list): objects", "unique=False): # Either obj_id or where_clause must be provided, but", "with dbcleanup(session, obj, where_clause): yield obj @contextmanager def dbcleanup(session, obj,", "(where_clause is not None) if where_clause is None: where_clause =", "if item1.id is None or item2.id is None or item1.id", "we get a clean instance. \"\"\" session.add(obj) session.flush() obj_id =", "be its primary key. 
collection.sort(key=lambda item: item.id) objects_l = list(objects)", "objects (check for list explicitly: a model can be iterable)", "abc import ABC, abstractmethod from contextlib import contextmanager from uuid", "def has_index(table, fields): for index in table.indexes: col_names = {c.name", "by primary key equality: object1.id == object2.id. \"\"\" if len(collection)", "\"\"\" if len(collection) != len(objects): # False if lengths are", "== obj.id) session.execute(stmt) def get_stored_obj(session, cls, obj_id=None, where_clause=None, unique=False): #", "objects have the id field as their primary key.) \"\"\"", "# save this before obj is expunged session.expunge(obj) return obj_id", "item2.id: return False return True def get_unique_value(): \"\"\"Generate unique values", "result = result.unique() return result.scalar_one() def has_unique_constraint(table, fields): for constraint", "if where_clause is None: where_clause = _get_default_where_clause(type(obj), obj_id) stmt =", "must be provided, but not both assert bool(obj_id) ^ (where_clause", "eager loads against collections # https://gerrit.sqlalchemy.org/c/sqlalchemy/sqlalchemy/+/2253 if unique: result =", "should be provided to construct a custom select statement. \"\"\"", "constraints.\"\"\" return uuid4().hex def _get_default_where_clause(cls, obj_id): where_clause = cls.__table__.c.id ==", "= select(cls).where(where_clause) result = session.execute(stmt) # unique() is required if", "obj in objects: table = obj.__table__ stmt = delete(table).where(table.c.id ==", "list): objects = [objects] for obj in objects: table =", "values to accommodate unique constraints.\"\"\" return uuid4().hex def _get_default_where_clause(cls, obj_id):", "statement. \"\"\" return_id = where_clause is None try: obj_id =", "test if use of a context manager is impractical. 
(Assume", "pytest from sqlalchemy import ( delete, select, UniqueConstraint, ) class", "before obj is expunged session.expunge(obj) return obj_id def delete_from_database(session, objects):", "if unique: result = result.unique() return result.scalar_one() def has_unique_constraint(table, fields):", "the session. If obj does not have an id field,", "by its 'id' attribute, which must be its primary key.", "= [objects] for obj in objects: table = obj.__table__ stmt", "def persist(session, obj, return_id=True): \"\"\" Use the session to store", "Either obj_id or where_clause must be provided, but not both", "from sqlalchemy import ( delete, select, UniqueConstraint, ) class AbstractBaseTest(ABC):", "loads against collections # https://gerrit.sqlalchemy.org/c/sqlalchemy/sqlalchemy/+/2253 if unique: result = result.unique()", "clause should be provided to construct a custom select statement.", "= where_clause is None try: obj_id = persist(session, obj, return_id)", "obj_id = obj.id if return_id else None # save this", "try: obj_id = persist(session, obj, return_id) yield obj_id finally: table", "list explicitly: a model can be iterable) if not isinstance(objects,", "class grouping the tests should be a subclass of BaseTest,", "key.) 
\"\"\" # Ensure we have a list of objects", "_get_default_where_clause(type(obj), obj_id) stmt = delete(table).where(where_clause) session.execute(stmt) def persist(session, obj, return_id=True):", "obj_id or where_clause must be provided, but not both assert", "( delete, select, UniqueConstraint, ) class AbstractBaseTest(ABC): @pytest.fixture def cls_(self):", "False return True def get_unique_value(): \"\"\"Generate unique values to accommodate", "If obj does not have an id field, a SQLAlchemy", "class_name) @abstractmethod def get_model(self): pass def dbcleanup_wrapper(session, obj, where_clause=None): with", "delete(table).where(table.c.id == obj.id) session.execute(stmt) def get_stored_obj(session, cls, obj_id=None, where_clause=None, unique=False):", "= persist(session, obj, return_id) yield obj_id finally: table = obj.__table__", "equality: object1.id == object2.id. \"\"\" if len(collection) != len(objects): #", "return uuid4().hex def _get_default_where_clause(cls, obj_id): where_clause = cls.__table__.c.id == obj_id", "len(collection) != len(objects): # False if lengths are different return", "stmt = delete(table).where(where_clause) session.execute(stmt) def persist(session, obj, return_id=True): \"\"\" Use", "Use the session to store obj in database, then remove", "False if lengths are different return False if not collection:", "so that on a subsequent load from the database we", "its 'id' attribute, which must be its primary key. 
collection.sort(key=lambda", "of objects (check for list explicitly: a model can be", "remove obj from session, so that on a subsequent load", "if return_id else None # save this before obj is", "stmt = delete(table).where(table.c.id == obj.id) session.execute(stmt) def get_stored_obj(session, cls, obj_id=None,", "the session to store obj in database, then remove obj", "obj in database; delete from database on exit, bypassing the", "# Sort, then compare each member by its 'id' attribute,", "\"\"\" Use the session to store obj in database; delete", "id field, a SQLAlchemy WHERE clause should be provided to", "database on exit, bypassing the session. If obj does not", "@abstractmethod def get_model(self): pass def dbcleanup_wrapper(session, obj, where_clause=None): with dbcleanup(session,", "from database. May be called at the end of a", "constraint.columns} if set(fields) == col_names: return True def has_index(table, fields):", "a test if use of a context manager is impractical.", "from the database we get a clean instance. \"\"\" session.add(obj)", "# unique() is required if result contains joint eager loads", "cls, obj_id=None, where_clause=None, unique=False): # Either obj_id or where_clause must", "compare each member by its 'id' attribute, which must be", "a SQLAlchemy WHERE clause should be provided to construct a", "in objects: table = obj.__table__ stmt = delete(table).where(table.c.id == obj.id)", "an id field, a SQLAlchemy WHERE clause should be provided", "each object in objects from database. May be called at", "[objects] for obj in objects: table = obj.__table__ stmt =", "Returns True iff list(collection) == list(objects), where object equality is", "https://gerrit.sqlalchemy.org/c/sqlalchemy/sqlalchemy/+/2253 if unique: result = result.unique() return result.scalar_one() def has_unique_constraint(table,", "where_clause=None, unique=False): # Either obj_id or where_clause must be provided,", "object in objects from database. 
May be called at the", "primary key equality: object1.id == object2.id. \"\"\" if len(collection) !=", "manager is impractical. (Assume all objects have the id field", "\"\"\"Generate unique values to accommodate unique constraints.\"\"\" return uuid4().hex def", "each member by its 'id' attribute, which must be its", "on exit, bypassing the session. If obj does not have", "the tests should be a subclass of BaseTest, named TestFoo.", "construct a custom select statement. \"\"\" return_id = where_clause is", "expunged session.expunge(obj) return obj_id def delete_from_database(session, objects): \"\"\" Delete each", "unique: result = result.unique() return result.scalar_one() def has_unique_constraint(table, fields): for", "be provided to construct a custom select statement. \"\"\" return_id", "table.constraints: if isinstance(constraint, UniqueConstraint): col_names = {c.name for c in", "to store obj in database; delete from database on exit,", "joint eager loads against collections # https://gerrit.sqlalchemy.org/c/sqlalchemy/sqlalchemy/+/2253 if unique: result", "iff list(collection) == list(objects), where object equality is determined by", "in table.constraints: if isinstance(constraint, UniqueConstraint): col_names = {c.name for c", "delete, select, UniqueConstraint, ) class AbstractBaseTest(ABC): @pytest.fixture def cls_(self): \"\"\"", "def dbcleanup_wrapper(session, obj, where_clause=None): with dbcleanup(session, obj, where_clause): yield obj", "def get_unique_value(): \"\"\"Generate unique values to accommodate unique constraints.\"\"\" return", "return obj_id def delete_from_database(session, objects): \"\"\" Delete each object in", "obj_id def delete_from_database(session, objects): \"\"\" Delete each object in objects", "where object equality is determined by primary key equality: object1.id", "have the id field as their primary key.) \"\"\" #", "c in constraint.columns} if set(fields) == col_names: return True def", "BaseTest, named TestFoo. 
\"\"\" prefix = len(\"Test\") class_name = self.__class__.__name__[prefix:]", "{c.name for c in constraint.columns} if set(fields) == col_names: return", "grouping the tests should be a subclass of BaseTest, named", "where_clause=None): \"\"\" Use the session to store obj in database;", "'id' attribute, which must be its primary key. collection.sort(key=lambda item:", "contextmanager from uuid import uuid4 import pytest from sqlalchemy import", "have an id field, a SQLAlchemy WHERE clause should be", "the end of a test if use of a context", "objects = [objects] for obj in objects: table = obj.__table__", "# https://gerrit.sqlalchemy.org/c/sqlalchemy/sqlalchemy/+/2253 if unique: result = result.unique() return result.scalar_one() def", "then the class grouping the tests should be a subclass", "select, UniqueConstraint, ) class AbstractBaseTest(ABC): @pytest.fixture def cls_(self): \"\"\" Return", "the class grouping the tests should be a subclass of", "False if not collection: # True if both are empty", "named TestFoo. \"\"\" prefix = len(\"Test\") class_name = self.__class__.__name__[prefix:] return" ]
[ "and creates a default one if not :param args: Unused", "an initial super user on deployment.\" def handle(self, *args, **kwargs):", "args: Unused :param kwargs: Unused \"\"\" super_users = get_user_model().objects.filter(is_superuser=True) if", "one') else: get_user_model().objects.create_superuser(email=\"<EMAIL>\", password=\"<PASSWORD>\") self.stdout.write('Created default superuser \"<EMAIL>\"') self.stdout.write('Make sure", "a default one if not :param args: Unused :param kwargs:", "not :param args: Unused :param kwargs: Unused \"\"\" super_users =", "any super users exist and creates a default one if", "get_user_model class Command(BaseCommand): help = \"Creates a default super user", "users exist and creates a default one if not :param", "one if not :param args: Unused :param kwargs: Unused \"\"\"", "\"Creates a default super user if one doesn't already exist.", "\\ \"This is designed to be used in the docker-compose.yml", "super users exist and creates a default one if not", "in the docker-compose.yml to create an initial super user on", ":param kwargs: Unused \"\"\" super_users = get_user_model().objects.filter(is_superuser=True) if super_users.exists(): self.stdout.write('A", "Command(BaseCommand): help = \"Creates a default super user if one", "if not :param args: Unused :param kwargs: Unused \"\"\" super_users", "Unused \"\"\" super_users = get_user_model().objects.filter(is_superuser=True) if super_users.exists(): self.stdout.write('A superuser already", "exists, not creating one') else: get_user_model().objects.create_superuser(email=\"<EMAIL>\", password=\"<PASSWORD>\") self.stdout.write('Created default superuser", "docker-compose.yml to create an initial super user on deployment.\" def", "\" \\ \"This is designed to be used in the", "import get_user_model class Command(BaseCommand): help = \"Creates a default super", "user if one doesn't already exist. 
\" \\ \"This is", "already exists, not creating one') else: get_user_model().objects.create_superuser(email=\"<EMAIL>\", password=\"<PASSWORD>\") self.stdout.write('Created default", "to create an initial super user on deployment.\" def handle(self,", "import BaseCommand from django.contrib.auth import get_user_model class Command(BaseCommand): help =", "= get_user_model().objects.filter(is_superuser=True) if super_users.exists(): self.stdout.write('A superuser already exists, not creating", "kwargs: Unused \"\"\" super_users = get_user_model().objects.filter(is_superuser=True) if super_users.exists(): self.stdout.write('A superuser", "to be used in the docker-compose.yml to create an initial", "else: get_user_model().objects.create_superuser(email=\"<EMAIL>\", password=\"<PASSWORD>\") self.stdout.write('Created default superuser \"<EMAIL>\"') self.stdout.write('Make sure you", "super_users.exists(): self.stdout.write('A superuser already exists, not creating one') else: get_user_model().objects.create_superuser(email=\"<EMAIL>\",", "default super user if one doesn't already exist. \" \\", "default superuser \"<EMAIL>\"') self.stdout.write('Make sure you change the password immediately!')", "**kwargs): \"\"\" Checks whether any super users exist and creates", "a default super user if one doesn't already exist. 
\"", "whether any super users exist and creates a default one", "on deployment.\" def handle(self, *args, **kwargs): \"\"\" Checks whether any", "class Command(BaseCommand): help = \"Creates a default super user if", "get_user_model().objects.create_superuser(email=\"<EMAIL>\", password=\"<PASSWORD>\") self.stdout.write('Created default superuser \"<EMAIL>\"') self.stdout.write('Make sure you change", "deployment.\" def handle(self, *args, **kwargs): \"\"\" Checks whether any super", "super_users = get_user_model().objects.filter(is_superuser=True) if super_users.exists(): self.stdout.write('A superuser already exists, not", "default one if not :param args: Unused :param kwargs: Unused", "Checks whether any super users exist and creates a default", "def handle(self, *args, **kwargs): \"\"\" Checks whether any super users", "django.core.management.base import BaseCommand from django.contrib.auth import get_user_model class Command(BaseCommand): help", "\"This is designed to be used in the docker-compose.yml to", "password=\"<PASSWORD>\") self.stdout.write('Created default superuser \"<EMAIL>\"') self.stdout.write('Make sure you change the", "django.contrib.auth import get_user_model class Command(BaseCommand): help = \"Creates a default", "user on deployment.\" def handle(self, *args, **kwargs): \"\"\" Checks whether", "superuser already exists, not creating one') else: get_user_model().objects.create_superuser(email=\"<EMAIL>\", password=\"<PASSWORD>\") self.stdout.write('Created", "be used in the docker-compose.yml to create an initial super", "if one doesn't already exist. \" \\ \"This is designed", "not creating one') else: get_user_model().objects.create_superuser(email=\"<EMAIL>\", password=\"<PASSWORD>\") self.stdout.write('Created default superuser \"<EMAIL>\"')", "doesn't already exist. 
\" \\ \"This is designed to be", "help = \"Creates a default super user if one doesn't", "self.stdout.write('A superuser already exists, not creating one') else: get_user_model().objects.create_superuser(email=\"<EMAIL>\", password=\"<PASSWORD>\")", ":param args: Unused :param kwargs: Unused \"\"\" super_users = get_user_model().objects.filter(is_superuser=True)", "initial super user on deployment.\" def handle(self, *args, **kwargs): \"\"\"", "from django.core.management.base import BaseCommand from django.contrib.auth import get_user_model class Command(BaseCommand):", "designed to be used in the docker-compose.yml to create an", "Unused :param kwargs: Unused \"\"\" super_users = get_user_model().objects.filter(is_superuser=True) if super_users.exists():", "already exist. \" \\ \"This is designed to be used", "creating one') else: get_user_model().objects.create_superuser(email=\"<EMAIL>\", password=\"<PASSWORD>\") self.stdout.write('Created default superuser \"<EMAIL>\"') self.stdout.write('Make", "exist. \" \\ \"This is designed to be used in", "is designed to be used in the docker-compose.yml to create", "*args, **kwargs): \"\"\" Checks whether any super users exist and", "\"\"\" super_users = get_user_model().objects.filter(is_superuser=True) if super_users.exists(): self.stdout.write('A superuser already exists,", "<reponame>chrisBrookes93/django-events-management from django.core.management.base import BaseCommand from django.contrib.auth import get_user_model class", "handle(self, *args, **kwargs): \"\"\" Checks whether any super users exist", "if super_users.exists(): self.stdout.write('A superuser already exists, not creating one') else:", "get_user_model().objects.filter(is_superuser=True) if super_users.exists(): self.stdout.write('A superuser already exists, not creating one')", "BaseCommand from django.contrib.auth import get_user_model class Command(BaseCommand): help = \"Creates", "super user if one doesn't already exist. 
\" \\ \"This", "create an initial super user on deployment.\" def handle(self, *args,", "used in the docker-compose.yml to create an initial super user", "\"\"\" Checks whether any super users exist and creates a", "= \"Creates a default super user if one doesn't already", "from django.contrib.auth import get_user_model class Command(BaseCommand): help = \"Creates a", "one doesn't already exist. \" \\ \"This is designed to", "the docker-compose.yml to create an initial super user on deployment.\"", "exist and creates a default one if not :param args:", "self.stdout.write('Created default superuser \"<EMAIL>\"') self.stdout.write('Make sure you change the password", "creates a default one if not :param args: Unused :param", "super user on deployment.\" def handle(self, *args, **kwargs): \"\"\" Checks" ]
[ "The implementation of this converter deliberately minimizes the amount of", "what's in the parent. - Ability to use genrule image", "because there's no easy way to teach Buck to serialize", "do. NB: This is easy to relax in the future", "Buck would merrily fetch the just the `feature` JSONs from", "parent layer and determine what it provides. - We cannot", "= None, flavor = None, flavor_config_override = None, antlir_rule =", "under the MIT license found in the # LICENSE file", "of an `image.layer` target is a JSON file with information", "graph. Everything else should be delegated to subcommands. ### Command", "behooves us to put most logic in external scripts, so", "have \"unobservable\" dependencies between features. Since feature dependencies are expected", "= \"image_layer\", _layer_name = name, # Build a new layer.", "- `flavor`: Picks default build options for the layer, including", "feature dependencies are expected to routinely cross layer boundaries, feature", "else should be delegated to subcommands. ### Command In composing", "from `REPO_CFG[flavor].flavor_to_config`. - `mount_config`: Specifies how this layer is mounted", "the dependencies of the ancestor layers' features. Doing that would", "resolution An `image.layer` consumes a set of `feature` outputs to", "LICENSE file in the root directory of this source tree.", "box -- whatever it has laid down in the image,", "- We cannot have \"unobservable\" dependencies between features. Since feature", "have the ability to test the \"good\" targets, it behooves", "to vary between machines or between runs. To achieve this,", "avoid problems if e.g. the user renames their repo, or", "deliberately minimizes the amount of business logic in its command.", "Therefore, `image.layer` has to explicitly tell buck that it needs", "of engineering, both of these non-pure approaches are a terrible", "disk paths into the command, do not do anything that", "all that the parent layer implementation can do. 
NB: This", "cannot make assertions about targets that fail to build. Since", "by writing a manifest with additional metadata into each layer,", "not do anything that might cause the bytes of the", "composing the `bash` command, our core maxim is: make it", "to routinely cross layer boundaries, feature implementations are forced only", "to explicitly tell buck that it needs all direct dependencies", "its command. The converter must include **only** our interactions with", "how). The consequences of this information hiding are: - Better", "depend on data that can be inferred from the filesystem", "buck targets to be emitted. `container` is always included in", "the btrfs subvolume. These outputs are actually just JSON files", "the field in `_image_layer_impl` in `image_layer_utils.bzl` and the [docs](/docs/tutorials/helper-buck-targets#imagelayer) for", "the MIT license found in the # LICENSE file in", "_layer_name = name, # Build a new layer. It may", "to helper scripts. We rely on environment variables or pipes", "engineering, both of these non-pure approaches are a terrible idea", "and determine what it provides. - We cannot have \"unobservable\"", "builds from legacy imperative systems. - The image compiler needs", "= compile_image_features( name = name, current_target = image_utils.current_target(name), parent_layer =", "we only have the ability to test the \"good\" targets,", "do anything that might cause the bytes of the command", "transitioning to Buck image builds from legacy imperative systems. -", "license found in the # LICENSE file in the root", "Buck to serialize a btrfs subvolume (for that, we have", "problems if e.g. the user renames their repo, or similar.", "in the per-repo `buck-image/out/volume/targets`. See `SubvolumeOnDisk.to_json_file`. ## Implementation notes The", "of `feature` outputs to decide what to put into the", "details. 
- `flavor_config_override`: A struct that can override the default", "`flavor_config_override`: A struct that can override the default values fetched", "how to invoke them. \"\"\" image_layer_utils.image_layer_impl( _rule_type = \"image_layer\", _layer_name", "`image_layer_utils.bzl` and the [docs](/docs/tutorials/helper-buck-targets#imagelayer) for the list of possible helpers,", "and thus can be represented as a btrfs send-stream for", "that fail to build. Since we only have the ability", "there's no easy way to teach Buck to serialize a", "from the filesystem -- since this is all that the", "to invoke them. \"\"\" image_layer_utils.image_layer_impl( _rule_type = \"image_layer\", _layer_name =", "additional parameters. Its purpose to materialize those `feature`s as a", "future by writing a manifest with additional metadata into each", "snapshot of its `parent_layer`, and thus can be represented as", "we have `package.new`). That said, we should still follow best", "approaches are a terrible idea and a maintainability headache, but", "it can be built on top of a snapshot of", "load(\":compile_image_features.bzl\", \"compile_image_features\") load(\":image_layer_utils.bzl\", \"image_layer_utils\") load(\":image_utils.bzl\", \"image_utils\") def image_layer( name, parent_layer", "compile_image_features( name = name, current_target = image_utils.current_target(name), parent_layer = parent_layer,", "layer as a black box -- whatever it has laid", "parent layer implementation can do. NB: This is easy to", "desired helper buck targets to be emitted. `container` is always", "will install its features. - `features`: List of `feature` target", "subvolume in the per-repo `buck-image/out/volume/targets`. We call the subvolume a", "[docs](/docs/tutorials/helper-buck-targets#imagelayer) for the list of possible helpers, their respective behaviours,", "a set of `feature` with some additional parameters. 
Its purpose", "None, features = None, flavor = None, flavor_config_override = None,", "default. See the field in `_image_layer_impl` in `image_layer_utils.bzl` and the", "find the resulting layer in the per-repo `buck-image/out/volume/targets`. See `SubvolumeOnDisk.to_json_file`.", "all direct dependencies of its `feature`s to be present on", "Ability to use genrule image layers / apply non-pure post-processing", "always included in the list by default. See the field", "Buck cache efficiency -- we don't have to download the", "uncacheable, because there's no easy way to teach Buck to", "pass data between the helper scripts. Another reason to keep", "outputs are actually just JSON files that reference other targets,", "In terms of engineering, both of these non-pure approaches are", "we should still follow best practices to avoid problems if", "paths instead of paths into the output directory. ### Dependency", "during compilation. \"\"\" load(\":compile_image_features.bzl\", \"compile_image_features\") load(\":image_layer_utils.bzl\", \"image_layer_utils\") load(\":image_utils.bzl\", \"image_utils\") def", "is mounted in the `mounts` field of a `feature` of", "make it a hermetic function of the converter's inputs --", "hermetic function of the converter's inputs -- do not read", "JSON file with information on how to find the resulting", "btrfs send-stream for more efficient storage & distribution. The Buck", "top of which the current layer will install its features.", "parent layer. See the field in `_image_layer_impl` in `image_layer_utils.bzl` -", "has laid down in the image, that's what it provides", "download the dependencies of the ancestor layers' features. Doing that", "empty. _make_subvol_cmd = compile_image_features( name = name, current_target = image_utils.current_target(name),", "on environment variables or pipes to pass data between the", "-- do not read data from disk, do not insert", "can unit-test its successes **and** failures thoroughly. 
### Output We", "systems. - The image compiler needs a litte extra code", "a `feature` of a parent layer. See the field in", "We rely on environment variables or pipes to pass data", "maxim is: make it a hermetic function of the converter's", "external scripts, so that we can unit-test its successes **and**", "the image, that's what it provides (and we don't care", "source tree. \"\"\" An `image.layer` is a set of `feature`", "since those bits are redundant with what's in the parent.", "root directory of this source tree. \"\"\" An `image.layer` is", "a parent layer. See the field in `_image_layer_impl` in `image_layer_utils.bzl`", "a btrfs subvolume (for that, we have `package.new`). That said,", "the output directory. ### Dependency resolution An `image.layer` consumes a", "to decide what to put into the btrfs subvolume. These", "the per-repo `buck-image/out/volume/targets`. We call the subvolume a \"layer\" because", "`flavor_helpers.bzl` for details. - `flavor_config_override`: A struct that can override", "in the image, that's what it provides (and we don't", "a terrible idea and a maintainability headache, but they do", "writing a manifest with additional metadata into each layer, and", "\"image_utils\") def image_layer( name, parent_layer = None, features = None,", "default values fetched from `REPO_CFG[flavor].flavor_to_config`. - `mount_config`: Specifies how this", "- Store Buck target paths instead of paths into the", "### Command In composing the `bash` command, our core maxim", "fetched from `REPO_CFG[flavor].flavor_to_config`. - `mount_config`: Specifies how this layer is", "the image. We do NOT need the direct dependencies of", "it provides. - We cannot have \"unobservable\" dependencies between features.", "that it needs all direct dependencies of its `feature`s to", "cross layer boundaries, feature implementations are forced only to depend", "a maintainability headache, but they do provide a useful bridge", "that, we have `package.new`). 
That said, we should still follow", "what it provides. - We cannot have \"unobservable\" dependencies between", "the ancestor layers' features. Doing that would be wasteful, since", "- `features`: List of `feature` target paths and/or nameless structs", "have to download the dependencies of the ancestor layers' features.", "do not read data from disk, do not insert disk", "this source tree. \"\"\" An `image.layer` is a set of", "targets, and do not contain the data to be written", "test the \"good\" targets, it behooves us to put most", "`features`: List of `feature` target paths and/or nameless structs from", "a btrfs subvolume in the per-repo `buck-image/out/volume/targets`. We call the", "- `mount_config`: Specifies how this layer is mounted in the", "the image. Therefore, `image.layer` has to explicitly tell buck that", "care about how). The consequences of this information hiding are:", "and a maintainability headache, but they do provide a useful", "flavor = None, flavor_config_override = None, antlir_rule = \"user-internal\", **image_layer_kwargs):", "image_layer_utils.image_layer_impl( _rule_type = \"image_layer\", _layer_name = name, # Build a", "Buck macros to resolve all paths, including those to helper", "the just the `feature` JSONs from its cache, and not", "These practices include: - The output JSON must store no", "use Buck macros to resolve all paths, including those to", "target paths instead of paths into the output directory. ###", "An `image.layer` consumes a set of `feature` outputs to decide", "no absolute paths. - Store Buck target paths instead of", "no easy way to teach Buck to serialize a btrfs", "to walk the parent layer and determine what it provides.", "is: make it a hermetic function of the converter's inputs", "disk -- see our `attrfilter` queries below. 
Without this, Buck", "logic in external scripts, so that we can unit-test its", "on how to find the resulting layer in the per-repo", "of the converter's inputs -- do not read data from", "since this is all that the parent layer implementation can", "they do provide a useful bridge for transitioning to Buck", "- `flavor_config_override`: A struct that can override the default values", "the root directory of this source tree. \"\"\" An `image.layer`", "amount of business logic in its command. The converter must", "what to put into the btrfs subvolume. These outputs are", "about how). The consequences of this information hiding are: -", "delegated to subcommands. ### Command In composing the `bash` command,", "of this information hiding are: - Better Buck cache efficiency", "queries below. Without this, Buck would merrily fetch the just", "more efficient storage & distribution. The Buck output of an", "are: - Better Buck cache efficiency -- we don't have", "layer. In terms of engineering, both of these non-pure approaches", "affiliates. # # This source code is licensed under the", "mark `image.layer` uncacheable, because there's no easy way to teach", "apply non-pure post-processing to a layer. In terms of engineering,", "to build. Since we only have the ability to test", "hiding are: - Better Buck cache efficiency -- we don't", "teach Buck to serialize a btrfs subvolume (for that, we", "= None, antlir_rule = \"user-internal\", **image_layer_kwargs): \"\"\" Arguments - `parent_layer`:", "still follow best practices to avoid problems if e.g. the", "target graph. Everything else should be delegated to subcommands. ###", "`image.layer` target is a JSON file with information on how", "`feature` target paths and/or nameless structs from `feature.new`. - `flavor`:", "Dependency resolution An `image.layer` consumes a set of `feature` outputs", "a snapshot of its `parent_layer`, and thus can be represented", "with some additional parameters. 
Its purpose to materialize those `feature`s", "= name, # Build a new layer. It may be", "we don't have to download the dependencies of the ancestor", "layer in the per-repo `buck-image/out/volume/targets`. See `SubvolumeOnDisk.to_json_file`. ## Implementation notes", "or pipes to pass data between the helper scripts. Another", "a \"layer\" because it can be built on top of", "our `attrfilter` queries below. Without this, Buck would merrily fetch", "features, flavor = flavor, flavor_config_override = flavor_config_override, ), antlir_rule =", "the subvolume a \"layer\" because it can be built on", "parent layer as a black box -- whatever it has", "distribution. The Buck output of an `image.layer` target is a", "the `bash` command, our core maxim is: make it a", "`build_appliance`, RPM installer, and others. See `flavor_helpers.bzl` for details. -", "to be emitted. `container` is always included in the list", "parent layer's features, because we treat the parent layer as", "the buid artifacts that comprise the image. We do NOT", "A struct that can override the default values fetched from", "data between the helper scripts. Another reason to keep this", "forced only to depend on data that can be inferred", "for the layer, including `build_appliance`, RPM installer, and others. See", "our core maxim is: make it a hermetic function of", "of the buid artifacts that comprise the image. We do", "Picks default build options for the layer, including `build_appliance`, RPM", "`attrfilter` queries below. Without this, Buck would merrily fetch the", "Its purpose to materialize those `feature`s as a btrfs subvolume", "it a hermetic function of the converter's inputs -- do", "the command to vary between machines or between runs. To", "and its affiliates. 
# # This source code is licensed", "In composing the `bash` command, our core maxim is: make", "`bash` command, our core maxim is: make it a hermetic", "converter minimal is that `buck test` cannot make assertions about", "that can be inferred from the filesystem -- since this", "of a parent layer. See the field in `_image_layer_impl` in", "of the command to vary between machines or between runs.", "not contain the data to be written into the image.", "be built on top of a snapshot of its `parent_layer`,", "thoroughly. ### Output We mark `image.layer` uncacheable, because there's no", "those `feature`s as a btrfs subvolume in the per-repo `buck-image/out/volume/targets`.", "this converter deliberately minimizes the amount of business logic in", "the bytes of the command to vary between machines or", "Copyright (c) Facebook, Inc. and its affiliates. # # This", "send-stream for more efficient storage & distribution. The Buck output", "\"compile_image_features\") load(\":image_layer_utils.bzl\", \"image_layer_utils\") load(\":image_utils.bzl\", \"image_utils\") def image_layer( name, parent_layer =", "provide us with any of the buid artifacts that comprise", "them. \"\"\" image_layer_utils.image_layer_impl( _rule_type = \"image_layer\", _layer_name = name, #", "/ apply non-pure post-processing to a layer. In terms of", "Another reason to keep this converter minimal is that `buck", "because we treat the parent layer as a black box", "disk, do not insert disk paths into the command, do", "needs a litte extra code to walk the parent layer", "on top of which the current layer will install its", "of a snapshot of its `parent_layer`, and thus can be", "image layers / apply non-pure post-processing to a layer. In", "features = None, flavor = None, flavor_config_override = None, antlir_rule", "we can unit-test its successes **and** failures thoroughly. 
### Output", "\"user-internal\", **image_layer_kwargs): \"\"\" Arguments - `parent_layer`: The name of another", "Implementation notes The implementation of this converter deliberately minimizes the", "in `_image_layer_impl` in `image_layer_utils.bzl` - `runtime`: A list of desired", "paths and/or nameless structs from `feature.new`. - `flavor`: Picks default", "-- since this is all that the parent layer implementation", "practices to avoid problems if e.g. the user renames their", "put into the btrfs subvolume. These outputs are actually just", "should be delegated to subcommands. ### Command In composing the", "tree. \"\"\" An `image.layer` is a set of `feature` with", "layer boundaries, feature implementations are forced only to depend on", "converter deliberately minimizes the amount of business logic in its", "list of possible helpers, their respective behaviours, and how to", "that metadata during compilation. \"\"\" load(\":compile_image_features.bzl\", \"compile_image_features\") load(\":image_layer_utils.bzl\", \"image_layer_utils\") load(\":image_utils.bzl\",", "paths into the command, do not do anything that might", "of this source tree. \"\"\" An `image.layer` is a set", "repo, or similar. These practices include: - The output JSON", "of another `image_layer` target, on top of which the current", "best practices to avoid problems if e.g. the user renames", "field of a `feature` of a parent layer. See the", "structs from `feature.new`. - `flavor`: Picks default build options for", "# LICENSE file in the root directory of this source", "of paths into the output directory. ### Dependency resolution An", "inferred from the filesystem -- since this is all that", "materialize those `feature`s as a btrfs subvolume in the per-repo", "must include **only** our interactions with the buck target graph.", "buid artifacts that comprise the image. We do NOT need", "are redundant with what's in the parent. 
- Ability to", "NB: This is easy to relax in the future by", "layer and determine what it provides. - We cannot have", "nameless structs from `feature.new`. - `flavor`: Picks default build options", "below. Without this, Buck would merrily fetch the just the", "found in the # LICENSE file in the root directory", "the helper scripts. Another reason to keep this converter minimal", "`package.new`). That said, we should still follow best practices to", "None, antlir_rule = \"user-internal\", **image_layer_kwargs): \"\"\" Arguments - `parent_layer`: The", "= name, current_target = image_utils.current_target(name), parent_layer = parent_layer, features =", "and the [docs](/docs/tutorials/helper-buck-targets#imagelayer) for the list of possible helpers, their", "current layer will install its features. - `features`: List of", "subcommands. ### Command In composing the `bash` command, our core", "_rule_type = \"image_layer\", _layer_name = name, # Build a new", "the data to be written into the image. Therefore, `image.layer`", "that we can unit-test its successes **and** failures thoroughly. ###", "JSON files that reference other targets, and do not contain", "is all that the parent layer implementation can do. NB:", "features. Since feature dependencies are expected to routinely cross layer", "those bits are redundant with what's in the parent. -", "to test the \"good\" targets, it behooves us to put", "must store no absolute paths. - Store Buck target paths", "practices include: - The output JSON must store no absolute", "consumes a set of `feature` outputs to decide what to", "The output JSON must store no absolute paths. - Store", "to be present on disk -- see our `attrfilter` queries", "### Output We mark `image.layer` uncacheable, because there's no easy", "each layer, and using that metadata during compilation. \"\"\" load(\":compile_image_features.bzl\",", "parent. 
- Ability to use genrule image layers / apply", "litte extra code to walk the parent layer and determine", "See `flavor_helpers.bzl` for details. - `flavor_config_override`: A struct that can", "converter must include **only** our interactions with the buck target", "decide what to put into the btrfs subvolume. These outputs", "the list of possible helpers, their respective behaviours, and how", "a manifest with additional metadata into each layer, and using", "not insert disk paths into the command, do not do", "field in `_image_layer_impl` in `image_layer_utils.bzl` and the [docs](/docs/tutorials/helper-buck-targets#imagelayer) for the", "business logic in its command. The converter must include **only**", "the buck target graph. Everything else should be delegated to", "be wasteful, since those bits are redundant with what's in", "image, that's what it provides (and we don't care about", "this layer is mounted in the `mounts` field of a", "information hiding are: - Better Buck cache efficiency -- we", "logic in its command. The converter must include **only** our", "absolute paths. - Store Buck target paths instead of paths", "layers' features. Doing that would be wasteful, since those bits", "is always included in the list by default. See the", "extra code to walk the parent layer and determine what", "image_utils.current_target(name), parent_layer = parent_layer, features = features, flavor = flavor,", "and using that metadata during compilation. \"\"\" load(\":compile_image_features.bzl\", \"compile_image_features\") load(\":image_layer_utils.bzl\",", "are expected to routinely cross layer boundaries, feature implementations are", "built on top of a snapshot of its `parent_layer`, and", "artifacts that comprise the image. We do NOT need the", "(c) Facebook, Inc. and its affiliates. 
# # This source", "ability to test the \"good\" targets, it behooves us to", "notes The implementation of this converter deliberately minimizes the amount", "This is easy to relax in the future by writing", "the `mounts` field of a `feature` of a parent layer.", "`mounts` field of a `feature` of a parent layer. See", "should still follow best practices to avoid problems if e.g.", "in the parent. - Ability to use genrule image layers", "is a JSON file with information on how to find", "to use genrule image layers / apply non-pure post-processing to", "non-pure approaches are a terrible idea and a maintainability headache,", "be inferred from the filesystem -- since this is all", "in the list by default. See the field in `_image_layer_impl`", "do not insert disk paths into the command, do not", "a useful bridge for transitioning to Buck image builds from", "are a terrible idea and a maintainability headache, but they", "new layer. It may be empty. _make_subvol_cmd = compile_image_features( name", "the user renames their repo, or similar. These practices include:", "provides (and we don't care about how). The consequences of", "buck target graph. Everything else should be delegated to subcommands.", "and how to invoke them. \"\"\" image_layer_utils.image_layer_impl( _rule_type = \"image_layer\",", "- Ability to use genrule image layers / apply non-pure", "helper scripts. We rely on environment variables or pipes to", "are forced only to depend on data that can be", "default build options for the layer, including `build_appliance`, RPM installer,", "`buck-image/out/volume/targets`. See `SubvolumeOnDisk.to_json_file`. 
## Implementation notes The implementation of this", "on data that can be inferred from the filesystem --", "bridge for transitioning to Buck image builds from legacy imperative", "read data from disk, do not insert disk paths into", "in external scripts, so that we can unit-test its successes", "other targets, and do not contain the data to be", "us with any of the buid artifacts that comprise the", "An `image.layer` is a set of `feature` with some additional", "The converter must include **only** our interactions with the buck", "outputs to decide what to put into the btrfs subvolume.", "idea and a maintainability headache, but they do provide a", "represented as a btrfs send-stream for more efficient storage &", "features. - `features`: List of `feature` target paths and/or nameless", "Better Buck cache efficiency -- we don't have to download", "just JSON files that reference other targets, and do not", "to be written into the image. Therefore, `image.layer` has to", "cache, and not provide us with any of the buid", "command. The converter must include **only** our interactions with the", "dependencies of the parent layer's features, because we treat the", "efficient storage & distribution. The Buck output of an `image.layer`", "file with information on how to find the resulting layer", "output JSON must store no absolute paths. - Store Buck", "resulting layer in the per-repo `buck-image/out/volume/targets`. See `SubvolumeOnDisk.to_json_file`. ## Implementation", "This source code is licensed under the MIT license found", "this, we use Buck macros to resolve all paths, including", "are actually just JSON files that reference other targets, and", "command to vary between machines or between runs. To achieve", "mounted in the `mounts` field of a `feature` of a", "feature implementations are forced only to depend on data that", "resolve all paths, including those to helper scripts. We rely", "bits are redundant with what's in the parent. 
- Ability", "which the current layer will install its features. - `features`:", "provides. - We cannot have \"unobservable\" dependencies between features. Since", "to depend on data that can be inferred from the", "`buck test` cannot make assertions about targets that fail to", "scripts. We rely on environment variables or pipes to pass", "the resulting layer in the per-repo `buck-image/out/volume/targets`. See `SubvolumeOnDisk.to_json_file`. ##", "easy to relax in the future by writing a manifest", "We do NOT need the direct dependencies of the parent", "It may be empty. _make_subvol_cmd = compile_image_features( name = name,", "= parent_layer, features = features, flavor = flavor, flavor_config_override =", "licensed under the MIT license found in the # LICENSE", "variables or pipes to pass data between the helper scripts.", "way to teach Buck to serialize a btrfs subvolume (for", "data that can be inferred from the filesystem -- since", "flavor, flavor_config_override = flavor_config_override, ), antlir_rule = antlir_rule, **image_layer_kwargs )", "as a black box -- whatever it has laid down", "Store Buck target paths instead of paths into the output", "the [docs](/docs/tutorials/helper-buck-targets#imagelayer) for the list of possible helpers, their respective", "\"\"\" An `image.layer` is a set of `feature` with some", "layer's features, because we treat the parent layer as a", "be written into the image. Therefore, `image.layer` has to explicitly", "See the field in `_image_layer_impl` in `image_layer_utils.bzl` - `runtime`: A", "MIT license found in the # LICENSE file in the", "of its `parent_layer`, and thus can be represented as a", "that might cause the bytes of the command to vary", "Since we only have the ability to test the \"good\"", "it has laid down in the image, that's what it", "Buck image builds from legacy imperative systems. 
- The image", "\"\"\" Arguments - `parent_layer`: The name of another `image_layer` target,", "`mount_config`: Specifies how this layer is mounted in the `mounts`", "`_image_layer_impl` in `image_layer_utils.bzl` and the [docs](/docs/tutorials/helper-buck-targets#imagelayer) for the list of", "paths into the output directory. ### Dependency resolution An `image.layer`", "is a set of `feature` with some additional parameters. Its", "it needs all direct dependencies of its `feature`s to be", "be present on disk -- see our `attrfilter` queries below.", "**only** our interactions with the buck target graph. Everything else", "what it provides (and we don't care about how). The", "name, parent_layer = None, features = None, flavor = None,", "pipes to pass data between the helper scripts. Another reason", "of which the current layer will install its features. -", "# Build a new layer. It may be empty. _make_subvol_cmd", "us to put most logic in external scripts, so that", "See `SubvolumeOnDisk.to_json_file`. ## Implementation notes The implementation of this converter", "Doing that would be wasteful, since those bits are redundant", "of its `feature`s to be present on disk -- see", "Specifies how this layer is mounted in the `mounts` field", "that can override the default values fetched from `REPO_CFG[flavor].flavor_to_config`. -", "instead of paths into the output directory. ### Dependency resolution", "of desired helper buck targets to be emitted. `container` is", "would merrily fetch the just the `feature` JSONs from its", "the parent layer as a black box -- whatever it", "may be empty. _make_subvol_cmd = compile_image_features( name = name, current_target", "build options for the layer, including `build_appliance`, RPM installer, and", "layer. See the field in `_image_layer_impl` in `image_layer_utils.bzl` - `runtime`:", "layer. It may be empty. _make_subvol_cmd = compile_image_features( name =", "target paths and/or nameless structs from `feature.new`. 
- `flavor`: Picks", "directory of this source tree. \"\"\" An `image.layer` is a", "the command, do not do anything that might cause the", "To achieve this, we use Buck macros to resolve all", "paths, including those to helper scripts. We rely on environment", "of these non-pure approaches are a terrible idea and a", "btrfs subvolume (for that, we have `package.new`). That said, we", "`parent_layer`, and thus can be represented as a btrfs send-stream", "efficiency -- we don't have to download the dependencies of", "determine what it provides. - We cannot have \"unobservable\" dependencies", "terrible idea and a maintainability headache, but they do provide", "unit-test its successes **and** failures thoroughly. ### Output We mark", "to resolve all paths, including those to helper scripts. We", "paths. - Store Buck target paths instead of paths into", "call the subvolume a \"layer\" because it can be built", "in the # LICENSE file in the root directory of", "image_layer( name, parent_layer = None, features = None, flavor =", "Output We mark `image.layer` uncacheable, because there's no easy way", "have `package.new`). That said, we should still follow best practices", "scripts. Another reason to keep this converter minimal is that", "using that metadata during compilation. \"\"\" load(\":compile_image_features.bzl\", \"compile_image_features\") load(\":image_layer_utils.bzl\", \"image_layer_utils\")", "parent_layer = parent_layer, features = features, flavor = flavor, flavor_config_override", "### Dependency resolution An `image.layer` consumes a set of `feature`", "to relax in the future by writing a manifest with", "fail to build. Since we only have the ability to", "ancestor layers' features. 
Doing that would be wasteful, since those", "\"image_layer_utils\") load(\":image_utils.bzl\", \"image_utils\") def image_layer( name, parent_layer = None, features", "data from disk, do not insert disk paths into the", "implementations are forced only to depend on data that can", "include **only** our interactions with the buck target graph. Everything", "data to be written into the image. Therefore, `image.layer` has", "others. See `flavor_helpers.bzl` for details. - `flavor_config_override`: A struct that", "that `buck test` cannot make assertions about targets that fail", "`feature` of a parent layer. See the field in `_image_layer_impl`", "failures thoroughly. ### Output We mark `image.layer` uncacheable, because there's", "behaviours, and how to invoke them. \"\"\" image_layer_utils.image_layer_impl( _rule_type =", "\"\"\" load(\":compile_image_features.bzl\", \"compile_image_features\") load(\":image_layer_utils.bzl\", \"image_layer_utils\") load(\":image_utils.bzl\", \"image_utils\") def image_layer( name,", "subvolume. These outputs are actually just JSON files that reference", "this is all that the parent layer implementation can do.", "the current layer will install its features. - `features`: List", "helpers, their respective behaviours, and how to invoke them. \"\"\"", "image. Therefore, `image.layer` has to explicitly tell buck that it", "compilation. \"\"\" load(\":compile_image_features.bzl\", \"compile_image_features\") load(\":image_layer_utils.bzl\", \"image_layer_utils\") load(\":image_utils.bzl\", \"image_utils\") def image_layer(", "on top of a snapshot of its `parent_layer`, and thus", "written into the image. Therefore, `image.layer` has to explicitly tell", "metadata into each layer, and using that metadata during compilation.", "invoke them. 
\"\"\" image_layer_utils.image_layer_impl( _rule_type = \"image_layer\", _layer_name = name,", "= flavor, flavor_config_override = flavor_config_override, ), antlir_rule = antlir_rule, **image_layer_kwargs", "redundant with what's in the parent. - Ability to use", "into each layer, and using that metadata during compilation. \"\"\"", "actually just JSON files that reference other targets, and do", "`image_layer` target, on top of which the current layer will", "include: - The output JSON must store no absolute paths.", "in its command. The converter must include **only** our interactions", "list of desired helper buck targets to be emitted. `container`", "use genrule image layers / apply non-pure post-processing to a", "additional metadata into each layer, and using that metadata during", "runs. To achieve this, we use Buck macros to resolve", "tell buck that it needs all direct dependencies of its", "`_image_layer_impl` in `image_layer_utils.bzl` - `runtime`: A list of desired helper", "name of another `image_layer` target, on top of which the", "parent_layer, features = features, flavor = flavor, flavor_config_override = flavor_config_override,", "put most logic in external scripts, so that we can", "per-repo `buck-image/out/volume/targets`. We call the subvolume a \"layer\" because it", "between machines or between runs. To achieve this, we use", "the parent layer's features, because we treat the parent layer", "from legacy imperative systems. - The image compiler needs a", "## Implementation notes The implementation of this converter deliberately minimizes", "`runtime`: A list of desired helper buck targets to be", "`REPO_CFG[flavor].flavor_to_config`. - `mount_config`: Specifies how this layer is mounted in", "Buck target paths instead of paths into the output directory.", "down in the image, that's what it provides (and we", "their respective behaviours, and how to invoke them. 
\"\"\" image_layer_utils.image_layer_impl(", "that reference other targets, and do not contain the data", "direct dependencies of the parent layer's features, because we treat", "how this layer is mounted in the `mounts` field of", "converter's inputs -- do not read data from disk, do", "helper scripts. Another reason to keep this converter minimal is", "features. Doing that would be wasteful, since those bits are", "with information on how to find the resulting layer in", "Command In composing the `bash` command, our core maxim is:", "layer will install its features. - `features`: List of `feature`", "that comprise the image. We do NOT need the direct", "needs all direct dependencies of its `feature`s to be present", "explicitly tell buck that it needs all direct dependencies of", "in `image_layer_utils.bzl` - `runtime`: A list of desired helper buck", "btrfs subvolume in the per-repo `buck-image/out/volume/targets`. We call the subvolume", "the direct dependencies of the parent layer's features, because we", "RPM installer, and others. See `flavor_helpers.bzl` for details. - `flavor_config_override`:", "follow best practices to avoid problems if e.g. the user", "can be inferred from the filesystem -- since this is", "reference other targets, and do not contain the data to", "serialize a btrfs subvolume (for that, we have `package.new`). That", "core maxim is: make it a hermetic function of the", "renames their repo, or similar. These practices include: - The", "test` cannot make assertions about targets that fail to build.", "None, flavor_config_override = None, antlir_rule = \"user-internal\", **image_layer_kwargs): \"\"\" Arguments", "vary between machines or between runs. To achieve this, we", "load(\":image_layer_utils.bzl\", \"image_layer_utils\") load(\":image_utils.bzl\", \"image_utils\") def image_layer( name, parent_layer = None,", "`feature` with some additional parameters. 
Its purpose to materialize those", "command, do not do anything that might cause the bytes", "black box -- whatever it has laid down in the", "expected to routinely cross layer boundaries, feature implementations are forced", "so that we can unit-test its successes **and** failures thoroughly.", "the # LICENSE file in the root directory of this", "for the list of possible helpers, their respective behaviours, and", "target, on top of which the current layer will install", "layer implementation can do. NB: This is easy to relax", "with the buck target graph. Everything else should be delegated", "to Buck image builds from legacy imperative systems. - The", "to put most logic in external scripts, so that we", "a hermetic function of the converter's inputs -- do not", "the filesystem -- since this is all that the parent", "set of `feature` outputs to decide what to put into", "of the parent layer's features, because we treat the parent", "we treat the parent layer as a black box --", "layer is mounted in the `mounts` field of a `feature`", "routinely cross layer boundaries, feature implementations are forced only to", "its features. - `features`: List of `feature` target paths and/or", "this information hiding are: - Better Buck cache efficiency --", "only to depend on data that can be inferred from", "our interactions with the buck target graph. Everything else should", "= None, features = None, flavor = None, flavor_config_override =", "inputs -- do not read data from disk, do not", "terms of engineering, both of these non-pure approaches are a", "flavor_config_override = None, antlir_rule = \"user-internal\", **image_layer_kwargs): \"\"\" Arguments -", "on disk -- see our `attrfilter` queries below. 
Without this,", "for transitioning to Buck image builds from legacy imperative systems.", "whatever it has laid down in the image, that's what", "not provide us with any of the buid artifacts that", "of this converter deliberately minimizes the amount of business logic", "to avoid problems if e.g. the user renames their repo,", "how to find the resulting layer in the per-repo `buck-image/out/volume/targets`.", "NOT need the direct dependencies of the parent layer's features,", "a litte extra code to walk the parent layer and", "**image_layer_kwargs): \"\"\" Arguments - `parent_layer`: The name of another `image_layer`", "We call the subvolume a \"layer\" because it can be", "the amount of business logic in its command. The converter", "in the future by writing a manifest with additional metadata", "for details. - `flavor_config_override`: A struct that can override the", "to a layer. In terms of engineering, both of these", "code is licensed under the MIT license found in the", "manifest with additional metadata into each layer, and using that", "dependencies of its `feature`s to be present on disk --", "user renames their repo, or similar. These practices include: -", "some additional parameters. Its purpose to materialize those `feature`s as", "files that reference other targets, and do not contain the", "top of a snapshot of its `parent_layer`, and thus can", "That said, we should still follow best practices to avoid", "walk the parent layer and determine what it provides. -", "`image.layer` uncacheable, because there's no easy way to teach Buck", "source code is licensed under the MIT license found in", "`feature` JSONs from its cache, and not provide us with", "Facebook, Inc. and its affiliates. # # This source code", "targets that fail to build. Since we only have the", "the list by default. 
See the field in `_image_layer_impl` in", "easy way to teach Buck to serialize a btrfs subvolume", "relax in the future by writing a manifest with additional", "a new layer. It may be empty. _make_subvol_cmd = compile_image_features(", "or between runs. To achieve this, we use Buck macros", "**and** failures thoroughly. ### Output We mark `image.layer` uncacheable, because", "need the direct dependencies of the parent layer's features, because", "helper buck targets to be emitted. `container` is always included", "= features, flavor = flavor, flavor_config_override = flavor_config_override, ), antlir_rule", "in the per-repo `buck-image/out/volume/targets`. We call the subvolume a \"layer\"", "by default. See the field in `_image_layer_impl` in `image_layer_utils.bzl` and", "# # This source code is licensed under the MIT", "genrule image layers / apply non-pure post-processing to a layer.", "def image_layer( name, parent_layer = None, features = None, flavor", "be empty. _make_subvol_cmd = compile_image_features( name = name, current_target =", "said, we should still follow best practices to avoid problems", "We cannot have \"unobservable\" dependencies between features. Since feature dependencies", "has to explicitly tell buck that it needs all direct", "buck that it needs all direct dependencies of its `feature`s", "antlir_rule = \"user-internal\", **image_layer_kwargs): \"\"\" Arguments - `parent_layer`: The name", "struct that can override the default values fetched from `REPO_CFG[flavor].flavor_to_config`.", "be emitted. `container` is always included in the list by", "to put into the btrfs subvolume. These outputs are actually", "to subcommands. ### Command In composing the `bash` command, our", "the layer, including `build_appliance`, RPM installer, and others. 
See `flavor_helpers.bzl`", "implementation of this converter deliberately minimizes the amount of business", "# This source code is licensed under the MIT license", "can be represented as a btrfs send-stream for more efficient", "the per-repo `buck-image/out/volume/targets`. See `SubvolumeOnDisk.to_json_file`. ## Implementation notes The implementation", "imperative systems. - The image compiler needs a litte extra", "would be wasteful, since those bits are redundant with what's", "scripts, so that we can unit-test its successes **and** failures", "merrily fetch the just the `feature` JSONs from its cache,", "cannot have \"unobservable\" dependencies between features. Since feature dependencies are", "\"image_layer\", _layer_name = name, # Build a new layer. It", "current_target = image_utils.current_target(name), parent_layer = parent_layer, features = features, flavor", "features = features, flavor = flavor, flavor_config_override = flavor_config_override, ),", "Arguments - `parent_layer`: The name of another `image_layer` target, on", "`feature`s as a btrfs subvolume in the per-repo `buck-image/out/volume/targets`. We", "The Buck output of an `image.layer` target is a JSON", "another `image_layer` target, on top of which the current layer", "achieve this, we use Buck macros to resolve all paths,", "emitted. `container` is always included in the list by default.", "dependencies of the ancestor layers' features. Doing that would be", "wasteful, since those bits are redundant with what's in the", "These outputs are actually just JSON files that reference other", "& distribution. The Buck output of an `image.layer` target is", "information on how to find the resulting layer in the", "similar. These practices include: - The output JSON must store", "to find the resulting layer in the per-repo `buck-image/out/volume/targets`. 
See", "See the field in `_image_layer_impl` in `image_layer_utils.bzl` and the [docs](/docs/tutorials/helper-buck-targets#imagelayer)", "name, # Build a new layer. It may be empty.", "directory. ### Dependency resolution An `image.layer` consumes a set of", "between the helper scripts. Another reason to keep this converter", "treat the parent layer as a black box -- whatever", "minimizes the amount of business logic in its command. The", "<gh_stars>10-100 # Copyright (c) Facebook, Inc. and its affiliates. #", "that's what it provides (and we don't care about how).", "the parent. - Ability to use genrule image layers /", "we use Buck macros to resolve all paths, including those", "it behooves us to put most logic in external scripts,", "we don't care about how). The consequences of this information", "metadata during compilation. \"\"\" load(\":compile_image_features.bzl\", \"compile_image_features\") load(\":image_layer_utils.bzl\", \"image_layer_utils\") load(\":image_utils.bzl\", \"image_utils\")", "- Better Buck cache efficiency -- we don't have to", "in the root directory of this source tree. \"\"\" An", "insert disk paths into the command, do not do anything", "direct dependencies of its `feature`s to be present on disk", "useful bridge for transitioning to Buck image builds from legacy", "field in `_image_layer_impl` in `image_layer_utils.bzl` - `runtime`: A list of", "in the `mounts` field of a `feature` of a parent", "reason to keep this converter minimal is that `buck test`", "`image.layer` consumes a set of `feature` outputs to decide what", "and do not contain the data to be written into", "Since feature dependencies are expected to routinely cross layer boundaries,", "its `parent_layer`, and thus can be represented as a btrfs", "do not contain the data to be written into the", "target is a JSON file with information on how to", "of business logic in its command. The converter must include", "those to helper scripts. 
We rely on environment variables or", "override the default values fetched from `REPO_CFG[flavor].flavor_to_config`. - `mount_config`: Specifies", "an `image.layer` target is a JSON file with information on", "None, flavor = None, flavor_config_override = None, antlir_rule = \"user-internal\",", "macros to resolve all paths, including those to helper scripts.", "in `image_layer_utils.bzl` and the [docs](/docs/tutorials/helper-buck-targets#imagelayer) for the list of possible", "a set of `feature` outputs to decide what to put", "environment variables or pipes to pass data between the helper", "its affiliates. # # This source code is licensed under", "make assertions about targets that fail to build. Since we", "image builds from legacy imperative systems. - The image compiler", "can override the default values fetched from `REPO_CFG[flavor].flavor_to_config`. - `mount_config`:", "only have the ability to test the \"good\" targets, it", "e.g. the user renames their repo, or similar. These practices", "image compiler needs a litte extra code to walk the", "or similar. These practices include: - The output JSON must", "successes **and** failures thoroughly. ### Output We mark `image.layer` uncacheable,", "the converter's inputs -- do not read data from disk,", "from `feature.new`. - `flavor`: Picks default build options for the", "to download the dependencies of the ancestor layers' features. Doing", "layer, including `build_appliance`, RPM installer, and others. See `flavor_helpers.bzl` for", "name, current_target = image_utils.current_target(name), parent_layer = parent_layer, features = features,", "this converter minimal is that `buck test` cannot make assertions", "might cause the bytes of the command to vary between", "its successes **and** failures thoroughly. ### Output We mark `image.layer`", "see our `attrfilter` queries below. Without this, Buck would merrily", "the \"good\" targets, it behooves us to put most logic", "non-pure post-processing to a layer. 
In terms of engineering, both", "all paths, including those to helper scripts. We rely on", "parent_layer = None, features = None, flavor = None, flavor_config_override", "to serialize a btrfs subvolume (for that, we have `package.new`).", "Without this, Buck would merrily fetch the just the `feature`", "cache efficiency -- we don't have to download the dependencies", "most logic in external scripts, so that we can unit-test", "both of these non-pure approaches are a terrible idea and", "into the command, do not do anything that might cause", "thus can be represented as a btrfs send-stream for more", "present on disk -- see our `attrfilter` queries below. Without", "dependencies between features. Since feature dependencies are expected to routinely", "a btrfs send-stream for more efficient storage & distribution. The", "between runs. To achieve this, we use Buck macros to", "We mark `image.layer` uncacheable, because there's no easy way to", "output of an `image.layer` target is a JSON file with", "store no absolute paths. - Store Buck target paths instead", "be delegated to subcommands. ### Command In composing the `bash`", "with additional metadata into each layer, and using that metadata", "The consequences of this information hiding are: - Better Buck", "\"unobservable\" dependencies between features. Since feature dependencies are expected to", "to keep this converter minimal is that `buck test` cannot", "to teach Buck to serialize a btrfs subvolume (for that,", "about targets that fail to build. Since we only have", "`image_layer_utils.bzl` - `runtime`: A list of desired helper buck targets", "dependencies are expected to routinely cross layer boundaries, feature implementations", "rely on environment variables or pipes to pass data between", "image. We do NOT need the direct dependencies of the", "- The output JSON must store no absolute paths. -", "file in the root directory of this source tree. 
\"\"\"", "Buck output of an `image.layer` target is a JSON file", "that would be wasteful, since those bits are redundant with", "post-processing to a layer. In terms of engineering, both of", "machines or between runs. To achieve this, we use Buck", "interactions with the buck target graph. Everything else should be", "features, because we treat the parent layer as a black", "(for that, we have `package.new`). That said, we should still", "its cache, and not provide us with any of the", "from its cache, and not provide us with any of", "into the btrfs subvolume. These outputs are actually just JSON", "with what's in the parent. - Ability to use genrule", "compiler needs a litte extra code to walk the parent", "included in the list by default. See the field in", "of `feature` with some additional parameters. Its purpose to materialize", "not read data from disk, do not insert disk paths", "maintainability headache, but they do provide a useful bridge for", "subvolume a \"layer\" because it can be built on top", "is that `buck test` cannot make assertions about targets that", "of a `feature` of a parent layer. See the field", "a JSON file with information on how to find the", "do NOT need the direct dependencies of the parent layer's", "but they do provide a useful bridge for transitioning to", "\"layer\" because it can be built on top of a", "of the ancestor layers' features. Doing that would be wasteful,", "contain the data to be written into the image. Therefore,", "= image_utils.current_target(name), parent_layer = parent_layer, features = features, flavor =", "this, Buck would merrily fetch the just the `feature` JSONs", "and/or nameless structs from `feature.new`. 
- `flavor`: Picks default build", "with any of the buid artifacts that comprise the image.", "possible helpers, their respective behaviours, and how to invoke them.", "keep this converter minimal is that `buck test` cannot make", "= None, flavor_config_override = None, antlir_rule = \"user-internal\", **image_layer_kwargs): \"\"\"", "the ability to test the \"good\" targets, it behooves us", "-- we don't have to download the dependencies of the", "is licensed under the MIT license found in the #", "do not do anything that might cause the bytes of", "that the parent layer implementation can do. NB: This is", "comprise the image. We do NOT need the direct dependencies", "`container` is always included in the list by default. See", "-- see our `attrfilter` queries below. Without this, Buck would", "minimal is that `buck test` cannot make assertions about targets", "of possible helpers, their respective behaviours, and how to invoke", "can be built on top of a snapshot of its", "Build a new layer. It may be empty. _make_subvol_cmd =", "_make_subvol_cmd = compile_image_features( name = name, current_target = image_utils.current_target(name), parent_layer", "`feature` outputs to decide what to put into the btrfs", "their repo, or similar. These practices include: - The output", "including `build_appliance`, RPM installer, and others. See `flavor_helpers.bzl` for details.", "it provides (and we don't care about how). The consequences", "options for the layer, including `build_appliance`, RPM installer, and others.", "# Copyright (c) Facebook, Inc. and its affiliates. # #", "cause the bytes of the command to vary between machines", "build. Since we only have the ability to test the", "layers / apply non-pure post-processing to a layer. 
In terms", "command, our core maxim is: make it a hermetic function", "- The image compiler needs a litte extra code to", "from disk, do not insert disk paths into the command,", "to materialize those `feature`s as a btrfs subvolume in the", "legacy imperative systems. - The image compiler needs a litte", "the `feature` JSONs from its cache, and not provide us", "the parent layer and determine what it provides. - We", "respective behaviours, and how to invoke them. \"\"\" image_layer_utils.image_layer_impl( _rule_type", "install its features. - `features`: List of `feature` target paths", "assertions about targets that fail to build. Since we only", "as a btrfs send-stream for more efficient storage & distribution.", "- `parent_layer`: The name of another `image_layer` target, on top", "any of the buid artifacts that comprise the image. We", "just the `feature` JSONs from its cache, and not provide", "and not provide us with any of the buid artifacts", "including those to helper scripts. We rely on environment variables", "set of `feature` with some additional parameters. Its purpose to", "code to walk the parent layer and determine what it", "The name of another `image_layer` target, on top of which", "installer, and others. See `flavor_helpers.bzl` for details. - `flavor_config_override`: A", "as a btrfs subvolume in the per-repo `buck-image/out/volume/targets`. We call", "boundaries, feature implementations are forced only to depend on data", "in `_image_layer_impl` in `image_layer_utils.bzl` and the [docs](/docs/tutorials/helper-buck-targets#imagelayer) for the list", "function of the converter's inputs -- do not read data", "the future by writing a manifest with additional metadata into", "-- whatever it has laid down in the image, that's", "and others. See `flavor_helpers.bzl` for details. - `flavor_config_override`: A struct", "don't care about how). 
The consequences of this information hiding", "consequences of this information hiding are: - Better Buck cache", "btrfs subvolume. These outputs are actually just JSON files that", "`flavor`: Picks default build options for the layer, including `build_appliance`,", "load(\":image_utils.bzl\", \"image_utils\") def image_layer( name, parent_layer = None, features =", "flavor = flavor, flavor_config_override = flavor_config_override, ), antlir_rule = antlir_rule,", "a black box -- whatever it has laid down in", "`SubvolumeOnDisk.to_json_file`. ## Implementation notes The implementation of this converter deliberately", "`feature`s to be present on disk -- see our `attrfilter`", "because it can be built on top of a snapshot", "of `feature` target paths and/or nameless structs from `feature.new`. -", "`buck-image/out/volume/targets`. We call the subvolume a \"layer\" because it can", "filesystem -- since this is all that the parent layer", "subvolume (for that, we have `package.new`). That said, we should", "`parent_layer`: The name of another `image_layer` target, on top of", "JSON must store no absolute paths. - Store Buck target", "the field in `_image_layer_impl` in `image_layer_utils.bzl` - `runtime`: A list", "purpose to materialize those `feature`s as a btrfs subvolume in", "be represented as a btrfs send-stream for more efficient storage", "(and we don't care about how). The consequences of this", "between features. Since feature dependencies are expected to routinely cross", "The image compiler needs a litte extra code to walk", "- `runtime`: A list of desired helper buck targets to", "implementation can do. NB: This is easy to relax in", "Inc. and its affiliates. # # This source code is", "parameters. 
Its purpose to materialize those `feature`s as a btrfs", "anything that might cause the bytes of the command to", "these non-pure approaches are a terrible idea and a maintainability", "laid down in the image, that's what it provides (and", "A list of desired helper buck targets to be emitted.", "provide a useful bridge for transitioning to Buck image builds", "its `feature`s to be present on disk -- see our", "values fetched from `REPO_CFG[flavor].flavor_to_config`. - `mount_config`: Specifies how this layer", "= \"user-internal\", **image_layer_kwargs): \"\"\" Arguments - `parent_layer`: The name of", "into the image. Therefore, `image.layer` has to explicitly tell buck", "`image.layer` is a set of `feature` with some additional parameters.", "headache, but they do provide a useful bridge for transitioning", "List of `feature` target paths and/or nameless structs from `feature.new`.", "list by default. See the field in `_image_layer_impl` in `image_layer_utils.bzl`", "for more efficient storage & distribution. The Buck output of", "the parent layer implementation can do. NB: This is easy", "`image.layer` has to explicitly tell buck that it needs all", "is easy to relax in the future by writing a", "`feature.new`. - `flavor`: Picks default build options for the layer,", "targets to be emitted. `container` is always included in the", "to pass data between the helper scripts. Another reason to", "output directory. ### Dependency resolution An `image.layer` consumes a set", "can do. NB: This is easy to relax in the", "Everything else should be delegated to subcommands. ### Command In", "\"good\" targets, it behooves us to put most logic in", "if e.g. the user renames their repo, or similar. These", "name = name, current_target = image_utils.current_target(name), parent_layer = parent_layer, features", "targets, it behooves us to put most logic in external", "layer, and using that metadata during compilation. 
\"\"\" load(\":compile_image_features.bzl\", \"compile_image_features\")", "do provide a useful bridge for transitioning to Buck image", "\"\"\" image_layer_utils.image_layer_impl( _rule_type = \"image_layer\", _layer_name = name, # Build", "storage & distribution. The Buck output of an `image.layer` target", "per-repo `buck-image/out/volume/targets`. See `SubvolumeOnDisk.to_json_file`. ## Implementation notes The implementation of", "JSONs from its cache, and not provide us with any", "a layer. In terms of engineering, both of these non-pure", "into the output directory. ### Dependency resolution An `image.layer` consumes", "the default values fetched from `REPO_CFG[flavor].flavor_to_config`. - `mount_config`: Specifies how", "don't have to download the dependencies of the ancestor layers'", "bytes of the command to vary between machines or between", "fetch the just the `feature` JSONs from its cache, and" ]
[]
[ "promt_tr ''' import sys from random import randint from promt_tr", "in ['zh', 'de', 'fr', 'it', 'es']: resu = promt_tr(text, from_lang,", "English text, with an optional to_lang') print('E.g., python -m promt_tr", "= 'auto' to_lang = 'zh' text = 'test ' +", "' '.join(argv) for to_lang in ['zh', 'de', 'fr', 'it', 'es']:", "text, with an optional to_lang') print('E.g., python -m promt_tr test", "not sys.argv[1:]: print('Provide some English text, with an optional to_lang')", "promt_tr, LANG_CODES # pragma: no cover def main(): '''main''' from_lang", "to_lang') print('E.g., python -m promt_tr test this and that de')", "to run: python -m promt_tr ''' import sys from random", "import promt_tr, LANG_CODES # pragma: no cover def main(): '''main'''", "len(argv) if len_ == 1: if argv[0] in LANG_CODES: to_lang", "str(randint(0, 10000)) if not sys.argv[1:]: print('Provide some English text, with", "'''main''' from_lang = 'auto' to_lang = 'zh' text = 'test", "'auto' to_lang = 'zh' text = 'test ' + str(randint(0,", "+ str(randint(0, 10000)) if not sys.argv[1:]: print('Provide some English text,", "this and that de') print('Testing with some random text\\n') else:", "with an optional to_lang') print('E.g., python -m promt_tr test this", "__main__, to run: python -m promt_tr ''' import sys from", "= sys.argv[1:] len_ = len(argv) if len_ == 1: if", "= ' '.join(argv[:-1]) else: text = ' '.join(argv) for to_lang", "some random text\\n') else: argv = sys.argv[1:] len_ = len(argv)", "from_lang, to_lang) print(f'[{text}] translated to [{to_lang}]: [{resu}]') if __name__ ==", "1: if argv[0] in LANG_CODES: to_lang = argv[0] else: text", "from_lang = 'auto' to_lang = 'zh' text = 'test '", "from promt_tr import promt_tr, LANG_CODES # pragma: no cover def", "-m promt_tr ''' import sys from random import randint from", "to_lang) print(f'[{text}] translated to [{to_lang}]: [{resu}]') if __name__ == '__main__':", "to_lang = 'zh' text = 'test ' + str(randint(0, 10000))", "def main(): 
'''main''' from_lang = 'auto' to_lang = 'zh' text", "an optional to_lang') print('E.g., python -m promt_tr test this and", "elif argv[-1] in LANG_CODES: to_lang = argv[-1] text = '", "argv[-1] text = ' '.join(argv[:-1]) else: text = ' '.join(argv)", "'test ' + str(randint(0, 10000)) if not sys.argv[1:]: print('Provide some", "sys.argv[1:] len_ = len(argv) if len_ == 1: if argv[0]", "'.join(argv) for to_lang in ['zh', 'de', 'fr', 'it', 'es']: resu", "'zh' text = 'test ' + str(randint(0, 10000)) if not", "run: python -m promt_tr ''' import sys from random import", "cover def main(): '''main''' from_lang = 'auto' to_lang = 'zh'", "import sys from random import randint from promt_tr import promt_tr,", "sys.argv[1:]: print('Provide some English text, with an optional to_lang') print('E.g.,", "if not sys.argv[1:]: print('Provide some English text, with an optional", "len_ == 1: if argv[0] in LANG_CODES: to_lang = argv[0]", "print(f'[{text}] translated to [{to_lang}]: [{resu}]') if __name__ == '__main__': main()", "random text\\n') else: argv = sys.argv[1:] len_ = len(argv) if", "main(): '''main''' from_lang = 'auto' to_lang = 'zh' text =", "some English text, with an optional to_lang') print('E.g., python -m", "if argv[0] in LANG_CODES: to_lang = argv[0] else: text =", "test this and that de') print('Testing with some random text\\n')", "10000)) if not sys.argv[1:]: print('Provide some English text, with an", "in LANG_CODES: to_lang = argv[0] else: text = argv[0] elif", "= 'zh' text = 'test ' + str(randint(0, 10000)) if", "resu = promt_tr(text, from_lang, to_lang) print(f'[{text}] translated to [{to_lang}]: [{resu}]')", "else: argv = sys.argv[1:] len_ = len(argv) if len_ ==", "that de') print('Testing with some random text\\n') else: argv =", "argv[0] else: text = argv[0] elif argv[-1] in LANG_CODES: to_lang", "= ' '.join(argv) for to_lang in ['zh', 'de', 'fr', 'it',", "= len(argv) if len_ == 1: if argv[0] in LANG_CODES:", "de') print('Testing with some random text\\n') 
else: argv = sys.argv[1:]", "LANG_CODES: to_lang = argv[-1] text = ' '.join(argv[:-1]) else: text", "argv[-1] in LANG_CODES: to_lang = argv[-1] text = ' '.join(argv[:-1])", "# pragma: no cover def main(): '''main''' from_lang = 'auto'", "print('Testing with some random text\\n') else: argv = sys.argv[1:] len_", "['zh', 'de', 'fr', 'it', 'es']: resu = promt_tr(text, from_lang, to_lang)", "sys from random import randint from promt_tr import promt_tr, LANG_CODES", "= argv[-1] text = ' '.join(argv[:-1]) else: text = '", "= argv[0] else: text = argv[0] elif argv[-1] in LANG_CODES:", "no cover def main(): '''main''' from_lang = 'auto' to_lang =", "for to_lang in ['zh', 'de', 'fr', 'it', 'es']: resu =", "'fr', 'it', 'es']: resu = promt_tr(text, from_lang, to_lang) print(f'[{text}] translated", "argv[0] elif argv[-1] in LANG_CODES: to_lang = argv[-1] text =", "to_lang = argv[-1] text = ' '.join(argv[:-1]) else: text =", "text = argv[0] elif argv[-1] in LANG_CODES: to_lang = argv[-1]", "promt_tr(text, from_lang, to_lang) print(f'[{text}] translated to [{to_lang}]: [{resu}]') if __name__", "import randint from promt_tr import promt_tr, LANG_CODES # pragma: no", "optional to_lang') print('E.g., python -m promt_tr test this and that", "randint from promt_tr import promt_tr, LANG_CODES # pragma: no cover", "== 1: if argv[0] in LANG_CODES: to_lang = argv[0] else:", "and that de') print('Testing with some random text\\n') else: argv", "'es']: resu = promt_tr(text, from_lang, to_lang) print(f'[{text}] translated to [{to_lang}]:", "promt_tr import promt_tr, LANG_CODES # pragma: no cover def main():", "'.join(argv[:-1]) else: text = ' '.join(argv) for to_lang in ['zh',", "print('E.g., python -m promt_tr test this and that de') print('Testing", "to_lang = argv[0] else: text = argv[0] elif argv[-1] in", "len_ = len(argv) if len_ == 1: if argv[0] in", "print('Provide some English text, with an optional to_lang') print('E.g., python", "else: text = ' '.join(argv) for to_lang in ['zh', 
'de',", "argv = sys.argv[1:] len_ = len(argv) if len_ == 1:", "' + str(randint(0, 10000)) if not sys.argv[1:]: print('Provide some English", "text = ' '.join(argv[:-1]) else: text = ' '.join(argv) for", "<filename>promt_tr/__main__.py ''' __main__, to run: python -m promt_tr ''' import", "python -m promt_tr test this and that de') print('Testing with", "else: text = argv[0] elif argv[-1] in LANG_CODES: to_lang =", "''' import sys from random import randint from promt_tr import", "'it', 'es']: resu = promt_tr(text, from_lang, to_lang) print(f'[{text}] translated to", "to_lang in ['zh', 'de', 'fr', 'it', 'es']: resu = promt_tr(text,", "LANG_CODES: to_lang = argv[0] else: text = argv[0] elif argv[-1]", "= promt_tr(text, from_lang, to_lang) print(f'[{text}] translated to [{to_lang}]: [{resu}]') if", "= 'test ' + str(randint(0, 10000)) if not sys.argv[1:]: print('Provide", "promt_tr test this and that de') print('Testing with some random", "in LANG_CODES: to_lang = argv[-1] text = ' '.join(argv[:-1]) else:", "' '.join(argv[:-1]) else: text = ' '.join(argv) for to_lang in", "with some random text\\n') else: argv = sys.argv[1:] len_ =", "text = ' '.join(argv) for to_lang in ['zh', 'de', 'fr',", "text\\n') else: argv = sys.argv[1:] len_ = len(argv) if len_", "''' __main__, to run: python -m promt_tr ''' import sys", "LANG_CODES # pragma: no cover def main(): '''main''' from_lang =", "pragma: no cover def main(): '''main''' from_lang = 'auto' to_lang", "-m promt_tr test this and that de') print('Testing with some", "argv[0] in LANG_CODES: to_lang = argv[0] else: text = argv[0]", "if len_ == 1: if argv[0] in LANG_CODES: to_lang =", "text = 'test ' + str(randint(0, 10000)) if not sys.argv[1:]:", "= argv[0] elif argv[-1] in LANG_CODES: to_lang = argv[-1] text", "python -m promt_tr ''' import sys from random import randint", "random import randint from promt_tr import promt_tr, LANG_CODES # pragma:", "'de', 'fr', 'it', 'es']: resu = promt_tr(text, from_lang, to_lang) 
print(f'[{text}]", "from random import randint from promt_tr import promt_tr, LANG_CODES #" ]
[ "pd.options.mode.chained_assignment = None gb = df_t_v3_s.groupby(['gameId', 'playId']) print('Getting closest players...')", "on='gameId') m_grouped = games_n_plays_df.groupby(['gameId', 'playId']) df_t = tracking_df.merge(games_n_plays_df, how='left', on=['gameId',", "df_t_v3 = df_t_v3[(df_t_v3.gameId != game_id) | (df_t_v3.playId != play_id)] df_t_v3_s", "'dir', 'event', 'team', 'gameId', 'playId', 'frameId', 'isDefensivePI' ]] gb_2 =", "ball.y), 'def_ball_d': calculate_distance(p_def.x, p_def.y, ball.x, ball.y), 'att_s': p_att.s, 'def_s': p_def.s,", "how='inner', on='gameId') m_grouped = games_n_plays_df.groupby(['gameId', 'playId']) df_t = tracking_df.merge(games_n_plays_df, how='left',", "home_has_possession(row), axis=1 ) clean_df['teamSituation'] = clean_df.apply( lambda row: calculate_team_sitation(row), axis=1", "!= 'pass_forward'): df_t_v3 = df_t_v3[(df_t_v3.gameId != game_id) | (df_t_v3.playId !=", "calculate_distance(p_att.x, p_att.y, p_def.x, p_def.y), 'att_ball_d': calculate_distance(p_att.x, p_att.y, ball.x, ball.y), 'def_ball_d':", "= clean_df.groupby(['gameId', 'playId', 'frameId']) # ball direction and orientation are", "ValueError: print('Value Error raised. 
This group will be skipped.') continue", "group in df_t_grouped: game_id, play_id = name # if group", "import sys import pandas as pd from datetime import datetime", "'tackle': check_group_event(group_events, 'tackle'), 'first_contact': check_group_event(group_events, 'first_contact'), 'pass_outcome_incomplete': check_group_event(group_events, 'pass_outcome_incomplete'), 'out_of_bounds':", "'pass_forward'): df_t_v3 = df_t_v3[(df_t_v3.gameId != game_id) | (df_t_v3.playId != play_id)]", "# remove all values before 'pass_forward' print(\"Removing all values before", ") print('Normalize...') clean_df = normalize_according_to_play_direction(clean_df) clean_df['homeHasPossession'] = clean_df.apply( lambda row:", "= clean_df[[ 'time', 'x', 'y', 's', 'o', 'dir', 'event', 'team',", "group in gb_2: game_id, play_id, frameId = name if len(group)", "'def_a': p_def.a, 'ball_a': ball.a, 'att_o': p_att.o, 'def_o': p_def.o, 'att_dir': p_att.dir,", "1 remove_start = group.index.min() df_t_v3_s = df_t_v3_s.drop(df_t_v3_s.loc[remove_start:pass_forward_frame_id].index) pd.options.mode.chained_assignment = None", "'ball_a': ball.a, 'att_o': p_att.o, 'def_o': p_def.o, 'att_dir': p_att.dir, 'def_dir': p_def.dir,", "row: calculate_team_sitation(row), axis=1 ) print('Creating features...') min_df = clean_df[[ 'time',", "game_id) | (df_t_v3.playId != play_id)] df_t_v3_s = df_t_v3.sort_values(by=['gameId', 'playId', 'time',", "= clean_df.apply( lambda row: calculate_team_sitation(row), axis=1 ) print('Creating features...') min_df", "= situation_df[situation_df.team == 'football'].head(1) # remove ball player_situation_df = situation_df[situation_df.team", "play_df.merge(week_and_games, how='inner', on='gameId') m_grouped = games_n_plays_df.groupby(['gameId', 'playId']) df_t = tracking_df.merge(games_n_plays_df,", "pd.DataFrame( columns=[ 'time', 'att_def_d', 'att_ball_d', 'def_ball_d', 'att_s', 'def_s', 'ball_s', 'att_o',", "except IndexError: print('Number of events is < 3, skipping...') 
continue", "in df_t_grouped: game_id, play_id = name pass_forward_frame_id = group[group.event ==", "group.index.min() df_t_v3_s = df_t_v3_s.drop(df_t_v3_s.loc[remove_start:pass_forward_frame_id].index) pd.options.mode.chained_assignment = None gb = df_t_v3_s.groupby(['gameId',", "= 3 for name, group in gb_2: game_id, play_id, frameId", "game_id, play_id = name pass_forward_frame_id = group[group.event == 'pass_forward'].index.min() -", "\\ normalize_according_to_play_direction, check_group_event from src.features.helpers.processing_v4 import home_has_possession, calculate_team_sitation week_num =", "games_df[games_df.week == week_num] tracking_df = pd.read_csv(os.path.join(RAW_DATA_DIR, f'week{week_num}.csv')) print(\"Data loaded. Start", "dict_to_append = { 'time': group_row.time, 'att_def_d': calculate_distance(p_att.x, p_att.y, p_def.x, p_def.y),", "clean_df.apply( lambda row: home_has_possession(row), axis=1 ) clean_df['teamSituation'] = clean_df.apply( lambda", "if len(group) < GROUP_SIZE_MINIMUM: continue ball = group[group.teamSituation == 'football'].head(1).squeeze()", "'time', 'event']) df_t_v3_s = df_t_v3_s.reset_index(drop=True) df_t_grouped = df_t_v3_s.groupby(['gameId', 'playId']) #", "= get_closest_players(player_situation_df, ball_row.x.item(), ball_row.y.item()) except ValueError: print('Value Error raised. 
This", "df_t_v3 = df_t.copy().sort_index() for name, group in df_t_grouped: game_id, play_id", "skipping...') continue situation_df = group[group.event == event_3rd] # convert dataframe", "'def_ball_d': calculate_distance(p_def.x, p_def.y, ball.x, ball.y), 'att_s': p_att.s, 'def_s': p_def.s, 'ball_s':", "pd.read_csv(os.path.join(RAW_DATA_DIR, 'games.csv')) week_and_games = games_df[games_df.week == week_num] tracking_df = pd.read_csv(os.path.join(RAW_DATA_DIR,", "group.event.unique()[2] except IndexError: print('Number of events is < 3, skipping...')", "'pass_arrived'), 'pass_outcome_caught': check_group_event(group_events, 'pass_outcome_caught'), 'tackle': check_group_event(group_events, 'tackle'), 'first_contact': check_group_event(group_events, 'first_contact'),", "name, group in gb_2: game_id, play_id, frameId = name if", "gb: game_id, play_id = name try: event_3rd = group.event.unique()[2] except", "in gb_2: game_id, play_id, frameId = name if len(group) <", "values before pass forward event...\") for name, group in df_t_grouped:", "dict_to_append, ignore_index=True ) print(\"Saving data...\") calc_df.to_csv( data_v3.get_step1_end_path(week_num), index=False ) print(f'End", "name # if group does not contain pass forward, drop", "pass forward event...\") for name, group in df_t_grouped: game_id, play_id", "situation_df[situation_df.team == 'football'].head(1) # remove ball player_situation_df = situation_df[situation_df.team !=", "= games_n_plays_df.groupby(['gameId', 'playId']) df_t = tracking_df.merge(games_n_plays_df, how='left', on=['gameId', 'playId']) #", "from src.features.helpers.processing_v4 import home_has_possession, calculate_team_sitation week_num = int(sys.argv[1]) data_v3 =", "direction and orientation are NaN calc_df = pd.DataFrame( columns=[ 'time',", "print('Normalize...') clean_df = normalize_according_to_play_direction(clean_df) clean_df['homeHasPossession'] = clean_df.apply( lambda row: home_has_possession(row),", "does not contain pass 
forward, drop it if all(group.event !=", "games_n_plays_df.groupby(['gameId', 'playId']) df_t = tracking_df.merge(games_n_plays_df, how='left', on=['gameId', 'playId']) # Remove", "as pd from datetime import datetime from settings import RAW_DATA_DIR,", "'frameId', 'isDefensivePI' ]] gb_2 = clean_df.groupby(['gameId', 'playId', 'frameId']) # ball", "if all(group.event != 'pass_forward'): df_t_v3 = df_t_v3[(df_t_v3.gameId != game_id) |", "DATA_V3_SUBVERSION from src.features.helpers.processing import add_missing_timestamp_values from src.features.helpers.processing_v3 import get_closest_players, get_players_and_ball_indices,", "p1, p2 = get_closest_players(player_situation_df, ball_row.x.item(), ball_row.y.item()) except ValueError: print('Value Error", "p_att.a, 'def_a': p_def.a, 'ball_a': ball.a, 'att_o': p_att.o, 'def_o': p_def.o, 'att_dir':", "'gameId', 'playId', 'frameId', 'isDefensivePI' ] ) GROUP_SIZE_MINIMUM = 3 for", "DataV3(DATA_V3_SUBVERSION) save_file_path = data_v3.get_step1_checkpoint_path(week_num) try: clean_df = pd.read_csv(save_file_path) save_file_exists =", "frameId = name if len(group) < GROUP_SIZE_MINIMUM: continue ball =", "'att_dir', 'def_dir', 'event', 'gameId', 'playId', 'frameId', 'isDefensivePI' ] ) GROUP_SIZE_MINIMUM", "== 'attacking'].head(1).squeeze() p_def = group[group.teamSituation == 'defending'].head(1).squeeze() group_row = group.head(1).squeeze()", "group[group.teamSituation == 'attacking'].head(1).squeeze() p_def = group[group.teamSituation == 'defending'].head(1).squeeze() group_row =", "'playId', 'frameId', 'isDefensivePI' ] ) GROUP_SIZE_MINIMUM = 3 for name,", "print('Value Error raised. 
This group will be skipped.') continue p_n_b_indices", "'defending'].head(1).squeeze() group_row = group.head(1).squeeze() group_events = group.event.unique().tolist() dict_to_append = {", "| (df_t_v3.playId != play_id)] df_t_v3_s = df_t_v3.sort_values(by=['gameId', 'playId', 'time', 'event'])", "int(sys.argv[1]) data_v3 = DataV3(DATA_V3_SUBVERSION) save_file_path = data_v3.get_step1_checkpoint_path(week_num) try: clean_df =", "# remove ball player_situation_df = situation_df[situation_df.team != 'football'] try: p1,", "'event', 'team', 'gameId', 'playId', 'frameId', 'isDefensivePI' ]] gb_2 = clean_df.groupby(['gameId',", "GROUP_SIZE_MINIMUM: continue ball = group[group.teamSituation == 'football'].head(1).squeeze() p_att = group[group.teamSituation", "NaN calc_df = pd.DataFrame( columns=[ 'time', 'att_def_d', 'att_ball_d', 'def_ball_d', 'att_s',", "'gameId', 'playId', 'frameId', 'isDefensivePI' ]] gb_2 = clean_df.groupby(['gameId', 'playId', 'frameId'])", "p_att.y, p_def.x, p_def.y), 'att_ball_d': calculate_distance(p_att.x, p_att.y, ball.x, ball.y), 'def_ball_d': calculate_distance(p_def.x,", "event...\") for name, group in df_t_grouped: game_id, play_id = name", "= pd.read_csv(os.path.join(RAW_DATA_DIR, 'games.csv')) week_and_games = games_df[games_df.week == week_num] tracking_df =", "gb = df_t_v3_s.groupby(['gameId', 'playId']) print('Getting closest players...') keep_indices = []", "= group[group.teamSituation == 'attacking'].head(1).squeeze() p_def = group[group.teamSituation == 'defending'].head(1).squeeze() group_row", "normalize_according_to_play_direction, check_group_event from src.features.helpers.processing_v4 import home_has_possession, calculate_team_sitation week_num = int(sys.argv[1])", "= pd.read_csv(os.path.join(RAW_DATA_DIR, f'week{week_num}.csv')) print(\"Data loaded. 
Start processing timestamps\") tracking_df =", "axis=1 ) print('Creating features...') min_df = clean_df[[ 'time', 'x', 'y',", "play_id = name pass_forward_frame_id = group[group.event == 'pass_forward'].index.min() - 1", "min_df = clean_df[[ 'time', 'x', 'y', 's', 'o', 'dir', 'event',", "ball direction and orientation are NaN calc_df = pd.DataFrame( columns=[", "= group[group.teamSituation == 'football'].head(1).squeeze() p_att = group[group.teamSituation == 'attacking'].head(1).squeeze() p_def", "df_t_v3_s.groupby(['gameId', 'playId']) # remove all values before 'pass_forward' print(\"Removing all", "= df_t.copy().sort_index() for name, group in df_t_grouped: game_id, play_id =", "= df_t_v3_s.groupby(['gameId', 'playId']) print('Getting closest players...') keep_indices = [] for", "closest players...') keep_indices = [] for name, group in gb:", "from src.features.helpers.processing_v3 import get_closest_players, get_players_and_ball_indices, calculate_distance, \\ normalize_according_to_play_direction, check_group_event from", "'playId']) # Remove all events without 'pass_forward' df_t_grouped = df_t.groupby(['gameId',", "3 for name, group in gb_2: game_id, play_id, frameId =", "from src.features.helpers.processing import add_missing_timestamp_values from src.features.helpers.processing_v3 import get_closest_players, get_players_and_ball_indices, calculate_distance,", "calculate_team_sitation week_num = int(sys.argv[1]) data_v3 = DataV3(DATA_V3_SUBVERSION) save_file_path = data_v3.get_step1_checkpoint_path(week_num)", "ball.x, ball.y), 'def_ball_d': calculate_distance(p_def.x, p_def.y, ball.x, ball.y), 'att_s': p_att.s, 'def_s':", "'frameId': group_row.frameId, 'isDefensivePI': group_row.isDefensivePI } calc_df = calc_df.append( dict_to_append, ignore_index=True", "src.features.helpers.processing_v3 import get_closest_players, get_players_and_ball_indices, calculate_distance, \\ normalize_according_to_play_direction, check_group_event from 
src.features.helpers.processing_v4", "get_closest_players, get_players_and_ball_indices, calculate_distance, \\ normalize_according_to_play_direction, check_group_event from src.features.helpers.processing_v4 import home_has_possession,", "pd.read_csv(os.path.join(RAW_DATA_DIR, f'week{week_num}.csv')) print(\"Data loaded. Start processing timestamps\") tracking_df = add_missing_timestamp_values(tracking_df)", "= name pass_forward_frame_id = group[group.event == 'pass_forward'].index.min() - 1 remove_start", "row: home_has_possession(row), axis=1 ) clean_df['teamSituation'] = clean_df.apply( lambda row: calculate_team_sitation(row),", "'x', 'y', 's', 'o', 'dir', 'event', 'team', 'gameId', 'playId', 'frameId',", "all events without 'pass_forward' df_t_grouped = df_t.groupby(['gameId', 'playId']) df_t_v3 =", "check_group_event(group_events, 'pass_outcome_caught'), 'tackle': check_group_event(group_events, 'tackle'), 'first_contact': check_group_event(group_events, 'first_contact'), 'pass_outcome_incomplete': check_group_event(group_events,", "raised. This group will be skipped.') continue p_n_b_indices = get_players_and_ball_indices(group,", "play_id, frameId = name if len(group) < GROUP_SIZE_MINIMUM: continue ball", "'att_s': p_att.s, 'def_s': p_def.s, 'ball_s': ball.s, 'att_a': p_att.a, 'def_a': p_def.a,", "event_3rd = group.event.unique()[2] except IndexError: print('Number of events is <", "df_t_v3_s = df_t_v3_s.reset_index(drop=True) df_t_grouped = df_t_v3_s.groupby(['gameId', 'playId']) # remove all", "= group[group.event == event_3rd] # convert dataframe into series ball_row", "'week': week_num, 'gameId': group_row.gameId, 'playId': group_row.playId, 'frameId': group_row.frameId, 'isDefensivePI': group_row.isDefensivePI", "GROUP_SIZE_MINIMUM = 3 for name, group in gb_2: game_id, play_id,", "f'week{week_num}.csv')) print(\"Data loaded. 
Start processing timestamps\") tracking_df = add_missing_timestamp_values(tracking_df) games_n_plays_df", "'event']) df_t_v3_s = df_t_v3_s.reset_index(drop=True) df_t_grouped = df_t_v3_s.groupby(['gameId', 'playId']) # remove", "clean_df = normalize_according_to_play_direction(clean_df) clean_df['homeHasPossession'] = clean_df.apply( lambda row: home_has_possession(row), axis=1", "group_row = group.head(1).squeeze() group_events = group.event.unique().tolist() dict_to_append = { 'time':", "- 1 remove_start = group.index.min() df_t_v3_s = df_t_v3_s.drop(df_t_v3_s.loc[remove_start:pass_forward_frame_id].index) pd.options.mode.chained_assignment =", "!= 'football'] try: p1, p2 = get_closest_players(player_situation_df, ball_row.x.item(), ball_row.y.item()) except", "situation_df[situation_df.team != 'football'] try: p1, p2 = get_closest_players(player_situation_df, ball_row.x.item(), ball_row.y.item())", "'isDefensivePI': group_row.isDefensivePI } calc_df = calc_df.append( dict_to_append, ignore_index=True ) print(\"Saving", "'football'].head(1) # remove ball player_situation_df = situation_df[situation_df.team != 'football'] try:", "= True except FileNotFoundError: save_file_exists = False if not save_file_exists:", "group will be skipped.') continue p_n_b_indices = get_players_and_ball_indices(group, p1, p2)", "pandas as pd from datetime import datetime from settings import", "'playId', 'time', 'event']) df_t_v3_s = df_t_v3_s.reset_index(drop=True) df_t_grouped = df_t_v3_s.groupby(['gameId', 'playId'])", "not save_file_exists: print(\"Started loading data\") play_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'plays.csv')) games_df", "features...') min_df = clean_df[[ 'time', 'x', 'y', 's', 'o', 'dir',", "'isDefensivePI' ]] gb_2 = clean_df.groupby(['gameId', 'playId', 'frameId']) # ball direction", "p2) if p_n_b_indices: keep_indices.extend(p_n_b_indices) clean_df = df_t_v3_s[df_t_v3_s.index.isin(keep_indices)] clean_df.to_csv( save_file_path, index=False", "dataframe 
into series ball_row = situation_df[situation_df.team == 'football'].head(1) # remove", ") print('Creating features...') min_df = clean_df[[ 'time', 'x', 'y', 's',", "= name # if group does not contain pass forward,", "group[group.event == event_3rd] # convert dataframe into series ball_row =", "group_row.playId, 'frameId': group_row.frameId, 'isDefensivePI': group_row.isDefensivePI } calc_df = calc_df.append( dict_to_append,", "clean_df = df_t_v3_s[df_t_v3_s.index.isin(keep_indices)] clean_df.to_csv( save_file_path, index=False ) print('Normalize...') clean_df =", "'time', 'x', 'y', 's', 'o', 'dir', 'event', 'team', 'gameId', 'playId',", "'def_s': p_def.s, 'ball_s': ball.s, 'att_a': p_att.a, 'def_a': p_def.a, 'ball_a': ball.a,", "ball_row.y.item()) except ValueError: print('Value Error raised. This group will be", "'att_def_d', 'att_ball_d', 'def_ball_d', 'att_s', 'def_s', 'ball_s', 'att_o', 'def_o', 'att_dir', 'def_dir',", "group_row.event, 'pass_arrived': check_group_event(group_events, 'pass_arrived'), 'pass_outcome_caught': check_group_event(group_events, 'pass_outcome_caught'), 'tackle': check_group_event(group_events, 'tackle'),", "os import sys import pandas as pd from datetime import", "get_closest_players(player_situation_df, ball_row.x.item(), ball_row.y.item()) except ValueError: print('Value Error raised. This group", "week_num] tracking_df = pd.read_csv(os.path.join(RAW_DATA_DIR, f'week{week_num}.csv')) print(\"Data loaded. 
Start processing timestamps\")", "group in df_t_grouped: game_id, play_id = name pass_forward_frame_id = group[group.event", "ignore_index=True ) print(\"Saving data...\") calc_df.to_csv( data_v3.get_step1_end_path(week_num), index=False ) print(f'End time:", "processing timestamps\") tracking_df = add_missing_timestamp_values(tracking_df) games_n_plays_df = play_df.merge(week_and_games, how='inner', on='gameId')", "group does not contain pass forward, drop it if all(group.event", "clean_df.groupby(['gameId', 'playId', 'frameId']) # ball direction and orientation are NaN", "< GROUP_SIZE_MINIMUM: continue ball = group[group.teamSituation == 'football'].head(1).squeeze() p_att =", "p_att.s, 'def_s': p_def.s, 'ball_s': ball.s, 'att_a': p_att.a, 'def_a': p_def.a, 'ball_a':", "from datetime import datetime from settings import RAW_DATA_DIR, DataV3, DATA_V3_SUBVERSION", "= [] for name, group in gb: game_id, play_id =", "'attacking'].head(1).squeeze() p_def = group[group.teamSituation == 'defending'].head(1).squeeze() group_row = group.head(1).squeeze() group_events", "check_group_event(group_events, 'first_contact'), 'pass_outcome_incomplete': check_group_event(group_events, 'pass_outcome_incomplete'), 'out_of_bounds': check_group_event(group_events, 'out_of_bounds'), 'week': week_num,", "= df_t_v3.sort_values(by=['gameId', 'playId', 'time', 'event']) df_t_v3_s = df_t_v3_s.reset_index(drop=True) df_t_grouped =", "= int(sys.argv[1]) data_v3 = DataV3(DATA_V3_SUBVERSION) save_file_path = data_v3.get_step1_checkpoint_path(week_num) try: clean_df", "ball_row = situation_df[situation_df.team == 'football'].head(1) # remove ball player_situation_df =", "clean_df.to_csv( save_file_path, index=False ) print('Normalize...') clean_df = normalize_according_to_play_direction(clean_df) clean_df['homeHasPossession'] =", "get_players_and_ball_indices(group, p1, p2) if p_n_b_indices: keep_indices.extend(p_n_b_indices) clean_df = df_t_v3_s[df_t_v3_s.index.isin(keep_indices)] 
clean_df.to_csv(", "df_t_grouped = df_t_v3_s.groupby(['gameId', 'playId']) # remove all values before 'pass_forward'", "df_t_v3_s[df_t_v3_s.index.isin(keep_indices)] clean_df.to_csv( save_file_path, index=False ) print('Normalize...') clean_df = normalize_according_to_play_direction(clean_df) clean_df['homeHasPossession']", "save_file_path, index=False ) print('Normalize...') clean_df = normalize_according_to_play_direction(clean_df) clean_df['homeHasPossession'] = clean_df.apply(", "len(group) < GROUP_SIZE_MINIMUM: continue ball = group[group.teamSituation == 'football'].head(1).squeeze() p_att", "for name, group in gb: game_id, play_id = name try:", "and orientation are NaN calc_df = pd.DataFrame( columns=[ 'time', 'att_def_d',", "]] gb_2 = clean_df.groupby(['gameId', 'playId', 'frameId']) # ball direction and", "'def_o': p_def.o, 'att_dir': p_att.dir, 'def_dir': p_def.dir, 'event': group_row.event, 'pass_arrived': check_group_event(group_events,", "save_file_exists: print(\"Started loading data\") play_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'plays.csv')) games_df =", "p_att.y, ball.x, ball.y), 'def_ball_d': calculate_distance(p_def.x, p_def.y, ball.x, ball.y), 'att_s': p_att.s,", "clean_df.apply( lambda row: calculate_team_sitation(row), axis=1 ) print('Creating features...') min_df =", "'frameId']) # ball direction and orientation are NaN calc_df =", "play_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'plays.csv')) games_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'games.csv')) week_and_games =", "p_def.s, 'ball_s': ball.s, 'att_a': p_att.a, 'def_a': p_def.a, 'ball_a': ball.a, 'att_o':", "clean_df['teamSituation'] = clean_df.apply( lambda row: calculate_team_sitation(row), axis=1 ) print('Creating features...')", "'pass_forward' print(\"Removing all values before pass forward event...\") for name,", "tracking_df.merge(games_n_plays_df, how='left', on=['gameId', 'playId']) # Remove all events without 'pass_forward'", "'att_dir': p_att.dir, 'def_dir': 
p_def.dir, 'event': group_row.event, 'pass_arrived': check_group_event(group_events, 'pass_arrived'), 'pass_outcome_caught':", "ball.a, 'att_o': p_att.o, 'def_o': p_def.o, 'att_dir': p_att.dir, 'def_dir': p_def.dir, 'event':", "if p_n_b_indices: keep_indices.extend(p_n_b_indices) clean_df = df_t_v3_s[df_t_v3_s.index.isin(keep_indices)] clean_df.to_csv( save_file_path, index=False )", "skipped.') continue p_n_b_indices = get_players_and_ball_indices(group, p1, p2) if p_n_b_indices: keep_indices.extend(p_n_b_indices)", "This group will be skipped.') continue p_n_b_indices = get_players_and_ball_indices(group, p1,", "= situation_df[situation_df.team != 'football'] try: p1, p2 = get_closest_players(player_situation_df, ball_row.x.item(),", "df_t_v3_s.groupby(['gameId', 'playId']) print('Getting closest players...') keep_indices = [] for name,", "= df_t_v3_s[df_t_v3_s.index.isin(keep_indices)] clean_df.to_csv( save_file_path, index=False ) print('Normalize...') clean_df = normalize_according_to_play_direction(clean_df)", "import add_missing_timestamp_values from src.features.helpers.processing_v3 import get_closest_players, get_players_and_ball_indices, calculate_distance, \\ normalize_according_to_play_direction,", "values before 'pass_forward' print(\"Removing all values before pass forward event...\")", "group_row.frameId, 'isDefensivePI': group_row.isDefensivePI } calc_df = calc_df.append( dict_to_append, ignore_index=True )", "'playId']) # remove all values before 'pass_forward' print(\"Removing all values", "'def_dir', 'event', 'gameId', 'playId', 'frameId', 'isDefensivePI' ] ) GROUP_SIZE_MINIMUM =", "= add_missing_timestamp_values(tracking_df) games_n_plays_df = play_df.merge(week_and_games, how='inner', on='gameId') m_grouped = games_n_plays_df.groupby(['gameId',", "p_def.dir, 'event': group_row.event, 'pass_arrived': check_group_event(group_events, 'pass_arrived'), 'pass_outcome_caught': check_group_event(group_events, 'pass_outcome_caught'), 'tackle':", 
"except FileNotFoundError: save_file_exists = False if not save_file_exists: print(\"Started loading", "= group[group.teamSituation == 'defending'].head(1).squeeze() group_row = group.head(1).squeeze() group_events = group.event.unique().tolist()", "'pass_outcome_caught'), 'tackle': check_group_event(group_events, 'tackle'), 'first_contact': check_group_event(group_events, 'first_contact'), 'pass_outcome_incomplete': check_group_event(group_events, 'pass_outcome_incomplete'),", "game_id, play_id = name try: event_3rd = group.event.unique()[2] except IndexError:", "before pass forward event...\") for name, group in df_t_grouped: game_id,", "import RAW_DATA_DIR, DataV3, DATA_V3_SUBVERSION from src.features.helpers.processing import add_missing_timestamp_values from src.features.helpers.processing_v3", "= tracking_df.merge(games_n_plays_df, how='left', on=['gameId', 'playId']) # Remove all events without", "# if group does not contain pass forward, drop it", "'pass_forward'].index.min() - 1 remove_start = group.index.min() df_t_v3_s = df_t_v3_s.drop(df_t_v3_s.loc[remove_start:pass_forward_frame_id].index) pd.options.mode.chained_assignment", "check_group_event(group_events, 'tackle'), 'first_contact': check_group_event(group_events, 'first_contact'), 'pass_outcome_incomplete': check_group_event(group_events, 'pass_outcome_incomplete'), 'out_of_bounds': check_group_event(group_events,", "'s', 'o', 'dir', 'event', 'team', 'gameId', 'playId', 'frameId', 'isDefensivePI' ]]", "datetime import datetime from settings import RAW_DATA_DIR, DataV3, DATA_V3_SUBVERSION from", "continue situation_df = group[group.event == event_3rd] # convert dataframe into", "name if len(group) < GROUP_SIZE_MINIMUM: continue ball = group[group.teamSituation ==", "p_def.a, 'ball_a': ball.a, 'att_o': p_att.o, 'def_o': p_def.o, 'att_dir': p_att.dir, 'def_dir':", "print('Number of events is < 3, skipping...') continue situation_df =", "'att_o': p_att.o, 'def_o': p_def.o, 'att_dir': p_att.dir, 
'def_dir': p_def.dir, 'event': group_row.event,", "'playId']) df_t_v3 = df_t.copy().sort_index() for name, group in df_t_grouped: game_id,", "import pandas as pd from datetime import datetime from settings", "games_n_plays_df = play_df.merge(week_and_games, how='inner', on='gameId') m_grouped = games_n_plays_df.groupby(['gameId', 'playId']) df_t", "continue ball = group[group.teamSituation == 'football'].head(1).squeeze() p_att = group[group.teamSituation ==", "df_t_v3_s = df_t_v3.sort_values(by=['gameId', 'playId', 'time', 'event']) df_t_v3_s = df_t_v3_s.reset_index(drop=True) df_t_grouped", "pass_forward_frame_id = group[group.event == 'pass_forward'].index.min() - 1 remove_start = group.index.min()", "print(\"Removing all values before pass forward event...\") for name, group", "df_t_grouped = df_t.groupby(['gameId', 'playId']) df_t_v3 = df_t.copy().sort_index() for name, group", "'time': group_row.time, 'att_def_d': calculate_distance(p_att.x, p_att.y, p_def.x, p_def.y), 'att_ball_d': calculate_distance(p_att.x, p_att.y,", "FileNotFoundError: save_file_exists = False if not save_file_exists: print(\"Started loading data\")", "m_grouped = games_n_plays_df.groupby(['gameId', 'playId']) df_t = tracking_df.merge(games_n_plays_df, how='left', on=['gameId', 'playId'])", "df_t_v3[(df_t_v3.gameId != game_id) | (df_t_v3.playId != play_id)] df_t_v3_s = df_t_v3.sort_values(by=['gameId',", "keep_indices.extend(p_n_b_indices) clean_df = df_t_v3_s[df_t_v3_s.index.isin(keep_indices)] clean_df.to_csv( save_file_path, index=False ) print('Normalize...') clean_df", "print(\"Started loading data\") play_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'plays.csv')) games_df = pd.read_csv(os.path.join(RAW_DATA_DIR,", "gb_2 = clean_df.groupby(['gameId', 'playId', 'frameId']) # ball direction and orientation", "False if not save_file_exists: print(\"Started loading data\") play_df = pd.read_csv(os.path.join(RAW_DATA_DIR,", "home_has_possession, calculate_team_sitation week_num = 
int(sys.argv[1]) data_v3 = DataV3(DATA_V3_SUBVERSION) save_file_path =", "Error raised. This group will be skipped.') continue p_n_b_indices =", "'gameId': group_row.gameId, 'playId': group_row.playId, 'frameId': group_row.frameId, 'isDefensivePI': group_row.isDefensivePI } calc_df", "tracking_df = add_missing_timestamp_values(tracking_df) games_n_plays_df = play_df.merge(week_and_games, how='inner', on='gameId') m_grouped =", "'att_ball_d', 'def_ball_d', 'att_s', 'def_s', 'ball_s', 'att_o', 'def_o', 'att_dir', 'def_dir', 'event',", "'football'].head(1).squeeze() p_att = group[group.teamSituation == 'attacking'].head(1).squeeze() p_def = group[group.teamSituation ==", "into series ball_row = situation_df[situation_df.team == 'football'].head(1) # remove ball", "group.head(1).squeeze() group_events = group.event.unique().tolist() dict_to_append = { 'time': group_row.time, 'att_def_d':", "in df_t_grouped: game_id, play_id = name # if group does", "sys import pandas as pd from datetime import datetime from", "without 'pass_forward' df_t_grouped = df_t.groupby(['gameId', 'playId']) df_t_v3 = df_t.copy().sort_index() for", "'att_def_d': calculate_distance(p_att.x, p_att.y, p_def.x, p_def.y), 'att_ball_d': calculate_distance(p_att.x, p_att.y, ball.x, ball.y),", "on=['gameId', 'playId']) # Remove all events without 'pass_forward' df_t_grouped =", "'plays.csv')) games_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'games.csv')) week_and_games = games_df[games_df.week == week_num]", "of events is < 3, skipping...') continue situation_df = group[group.event", "p_n_b_indices: keep_indices.extend(p_n_b_indices) clean_df = df_t_v3_s[df_t_v3_s.index.isin(keep_indices)] clean_df.to_csv( save_file_path, index=False ) print('Normalize...')", "group.event.unique().tolist() dict_to_append = { 'time': group_row.time, 'att_def_d': calculate_distance(p_att.x, p_att.y, p_def.x,", "'playId']) df_t = tracking_df.merge(games_n_plays_df, how='left', on=['gameId', 'playId']) # Remove all", 
"gb_2: game_id, play_id, frameId = name if len(group) < GROUP_SIZE_MINIMUM:", "for name, group in gb_2: game_id, play_id, frameId = name", "check_group_event from src.features.helpers.processing_v4 import home_has_possession, calculate_team_sitation week_num = int(sys.argv[1]) data_v3", "datetime from settings import RAW_DATA_DIR, DataV3, DATA_V3_SUBVERSION from src.features.helpers.processing import", "import datetime from settings import RAW_DATA_DIR, DataV3, DATA_V3_SUBVERSION from src.features.helpers.processing", "pd.read_csv(os.path.join(RAW_DATA_DIR, 'plays.csv')) games_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'games.csv')) week_and_games = games_df[games_df.week ==", "print('Getting closest players...') keep_indices = [] for name, group in", "series ball_row = situation_df[situation_df.team == 'football'].head(1) # remove ball player_situation_df", "event_3rd] # convert dataframe into series ball_row = situation_df[situation_df.team ==", "'team', 'gameId', 'playId', 'frameId', 'isDefensivePI' ]] gb_2 = clean_df.groupby(['gameId', 'playId',", "'frameId', 'isDefensivePI' ] ) GROUP_SIZE_MINIMUM = 3 for name, group", "clean_df[[ 'time', 'x', 'y', 's', 'o', 'dir', 'event', 'team', 'gameId',", "ball.y), 'att_s': p_att.s, 'def_s': p_def.s, 'ball_s': ball.s, 'att_a': p_att.a, 'def_a':", "week_num, 'gameId': group_row.gameId, 'playId': group_row.playId, 'frameId': group_row.frameId, 'isDefensivePI': group_row.isDefensivePI }", "== week_num] tracking_df = pd.read_csv(os.path.join(RAW_DATA_DIR, f'week{week_num}.csv')) print(\"Data loaded. 
Start processing", "df_t_v3_s.drop(df_t_v3_s.loc[remove_start:pass_forward_frame_id].index) pd.options.mode.chained_assignment = None gb = df_t_v3_s.groupby(['gameId', 'playId']) print('Getting closest", "axis=1 ) clean_df['teamSituation'] = clean_df.apply( lambda row: calculate_team_sitation(row), axis=1 )", "'att_a': p_att.a, 'def_a': p_def.a, 'ball_a': ball.a, 'att_o': p_att.o, 'def_o': p_def.o,", "'def_s', 'ball_s', 'att_o', 'def_o', 'att_dir', 'def_dir', 'event', 'gameId', 'playId', 'frameId',", "== 'football'].head(1) # remove ball player_situation_df = situation_df[situation_df.team != 'football']", "in gb: game_id, play_id = name try: event_3rd = group.event.unique()[2]", "{ 'time': group_row.time, 'att_def_d': calculate_distance(p_att.x, p_att.y, p_def.x, p_def.y), 'att_ball_d': calculate_distance(p_att.x,", "data_v3.get_step1_checkpoint_path(week_num) try: clean_df = pd.read_csv(save_file_path) save_file_exists = True except FileNotFoundError:", "events is < 3, skipping...') continue situation_df = group[group.event ==", "group_events = group.event.unique().tolist() dict_to_append = { 'time': group_row.time, 'att_def_d': calculate_distance(p_att.x,", "contain pass forward, drop it if all(group.event != 'pass_forward'): df_t_v3", "except ValueError: print('Value Error raised. 
This group will be skipped.')", "'def_dir': p_def.dir, 'event': group_row.event, 'pass_arrived': check_group_event(group_events, 'pass_arrived'), 'pass_outcome_caught': check_group_event(group_events, 'pass_outcome_caught'),", "= df_t_v3_s.reset_index(drop=True) df_t_grouped = df_t_v3_s.groupby(['gameId', 'playId']) # remove all values", "None gb = df_t_v3_s.groupby(['gameId', 'playId']) print('Getting closest players...') keep_indices =", "= normalize_according_to_play_direction(clean_df) clean_df['homeHasPossession'] = clean_df.apply( lambda row: home_has_possession(row), axis=1 )", "3, skipping...') continue situation_df = group[group.event == event_3rd] # convert", "keep_indices = [] for name, group in gb: game_id, play_id", "name try: event_3rd = group.event.unique()[2] except IndexError: print('Number of events", "'first_contact': check_group_event(group_events, 'first_contact'), 'pass_outcome_incomplete': check_group_event(group_events, 'pass_outcome_incomplete'), 'out_of_bounds': check_group_event(group_events, 'out_of_bounds'), 'week':", "= group[group.event == 'pass_forward'].index.min() - 1 remove_start = group.index.min() df_t_v3_s", "convert dataframe into series ball_row = situation_df[situation_df.team == 'football'].head(1) #", "'out_of_bounds'), 'week': week_num, 'gameId': group_row.gameId, 'playId': group_row.playId, 'frameId': group_row.frameId, 'isDefensivePI':", "all values before pass forward event...\") for name, group in", "is < 3, skipping...') continue situation_df = group[group.event == event_3rd]", "pd from datetime import datetime from settings import RAW_DATA_DIR, DataV3,", "settings import RAW_DATA_DIR, DataV3, DATA_V3_SUBVERSION from src.features.helpers.processing import add_missing_timestamp_values from", "'tackle'), 'first_contact': check_group_event(group_events, 'first_contact'), 'pass_outcome_incomplete': check_group_event(group_events, 'pass_outcome_incomplete'), 'out_of_bounds': check_group_event(group_events, 
'out_of_bounds'),", "not contain pass forward, drop it if all(group.event != 'pass_forward'):", "import get_closest_players, get_players_and_ball_indices, calculate_distance, \\ normalize_according_to_play_direction, check_group_event from src.features.helpers.processing_v4 import", "!= play_id)] df_t_v3_s = df_t_v3.sort_values(by=['gameId', 'playId', 'time', 'event']) df_t_v3_s =", "pd.read_csv(save_file_path) save_file_exists = True except FileNotFoundError: save_file_exists = False if", "df_t = tracking_df.merge(games_n_plays_df, how='left', on=['gameId', 'playId']) # Remove all events", "check_group_event(group_events, 'pass_outcome_incomplete'), 'out_of_bounds': check_group_event(group_events, 'out_of_bounds'), 'week': week_num, 'gameId': group_row.gameId, 'playId':", "= df_t.groupby(['gameId', 'playId']) df_t_v3 = df_t.copy().sort_index() for name, group in", "forward, drop it if all(group.event != 'pass_forward'): df_t_v3 = df_t_v3[(df_t_v3.gameId", "< 3, skipping...') continue situation_df = group[group.event == event_3rd] #", "DataV3, DATA_V3_SUBVERSION from src.features.helpers.processing import add_missing_timestamp_values from src.features.helpers.processing_v3 import get_closest_players,", "columns=[ 'time', 'att_def_d', 'att_ball_d', 'def_ball_d', 'att_s', 'def_s', 'ball_s', 'att_o', 'def_o',", "= name if len(group) < GROUP_SIZE_MINIMUM: continue ball = group[group.teamSituation", "normalize_according_to_play_direction(clean_df) clean_df['homeHasPossession'] = clean_df.apply( lambda row: home_has_possession(row), axis=1 ) clean_df['teamSituation']", "data_v3 = DataV3(DATA_V3_SUBVERSION) save_file_path = data_v3.get_step1_checkpoint_path(week_num) try: clean_df = pd.read_csv(save_file_path)", "games_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'games.csv')) week_and_games = games_df[games_df.week == week_num] tracking_df", "= { 'time': group_row.time, 'att_def_d': calculate_distance(p_att.x, p_att.y, p_def.x, p_def.y), 'att_ball_d':", "'first_contact'), 
'pass_outcome_incomplete': check_group_event(group_events, 'pass_outcome_incomplete'), 'out_of_bounds': check_group_event(group_events, 'out_of_bounds'), 'week': week_num, 'gameId':", "are NaN calc_df = pd.DataFrame( columns=[ 'time', 'att_def_d', 'att_ball_d', 'def_ball_d',", "# ball direction and orientation are NaN calc_df = pd.DataFrame(", "import os import sys import pandas as pd from datetime", "df_t_v3_s.reset_index(drop=True) df_t_grouped = df_t_v3_s.groupby(['gameId', 'playId']) # remove all values before", "print('Creating features...') min_df = clean_df[[ 'time', 'x', 'y', 's', 'o',", "try: clean_df = pd.read_csv(save_file_path) save_file_exists = True except FileNotFoundError: save_file_exists", "data\") play_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'plays.csv')) games_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'games.csv')) week_and_games", "'playId', 'frameId', 'isDefensivePI' ]] gb_2 = clean_df.groupby(['gameId', 'playId', 'frameId']) #", "df_t.groupby(['gameId', 'playId']) df_t_v3 = df_t.copy().sort_index() for name, group in df_t_grouped:", "'isDefensivePI' ] ) GROUP_SIZE_MINIMUM = 3 for name, group in", "all(group.event != 'pass_forward'): df_t_v3 = df_t_v3[(df_t_v3.gameId != game_id) | (df_t_v3.playId", "= clean_df.apply( lambda row: home_has_possession(row), axis=1 ) clean_df['teamSituation'] = clean_df.apply(", "try: event_3rd = group.event.unique()[2] except IndexError: print('Number of events is", "src.features.helpers.processing_v4 import home_has_possession, calculate_team_sitation week_num = int(sys.argv[1]) data_v3 = DataV3(DATA_V3_SUBVERSION)", "calc_df = pd.DataFrame( columns=[ 'time', 'att_def_d', 'att_ball_d', 'def_ball_d', 'att_s', 'def_s',", "week_num = int(sys.argv[1]) data_v3 = DataV3(DATA_V3_SUBVERSION) save_file_path = data_v3.get_step1_checkpoint_path(week_num) try:", "'football'] try: p1, p2 = get_closest_players(player_situation_df, ball_row.x.item(), ball_row.y.item()) except ValueError:", "Remove all events without 
'pass_forward' df_t_grouped = df_t.groupby(['gameId', 'playId']) df_t_v3", "'playId', 'frameId']) # ball direction and orientation are NaN calc_df", "= pd.read_csv(save_file_path) save_file_exists = True except FileNotFoundError: save_file_exists = False", "'time', 'att_def_d', 'att_ball_d', 'def_ball_d', 'att_s', 'def_s', 'ball_s', 'att_o', 'def_o', 'att_dir',", "from settings import RAW_DATA_DIR, DataV3, DATA_V3_SUBVERSION from src.features.helpers.processing import add_missing_timestamp_values", "drop it if all(group.event != 'pass_forward'): df_t_v3 = df_t_v3[(df_t_v3.gameId !=", "timestamps\") tracking_df = add_missing_timestamp_values(tracking_df) games_n_plays_df = play_df.merge(week_and_games, how='inner', on='gameId') m_grouped", "'def_ball_d', 'att_s', 'def_s', 'ball_s', 'att_o', 'def_o', 'att_dir', 'def_dir', 'event', 'gameId',", "p_att = group[group.teamSituation == 'attacking'].head(1).squeeze() p_def = group[group.teamSituation == 'defending'].head(1).squeeze()", "ball = group[group.teamSituation == 'football'].head(1).squeeze() p_att = group[group.teamSituation == 'attacking'].head(1).squeeze()", "== event_3rd] # convert dataframe into series ball_row = situation_df[situation_df.team", "players...') keep_indices = [] for name, group in gb: game_id,", "'pass_forward' df_t_grouped = df_t.groupby(['gameId', 'playId']) df_t_v3 = df_t.copy().sort_index() for name,", "continue p_n_b_indices = get_players_and_ball_indices(group, p1, p2) if p_n_b_indices: keep_indices.extend(p_n_b_indices) clean_df", "p2 = get_closest_players(player_situation_df, ball_row.x.item(), ball_row.y.item()) except ValueError: print('Value Error raised.", "calculate_distance(p_att.x, p_att.y, ball.x, ball.y), 'def_ball_d': calculate_distance(p_def.x, p_def.y, ball.x, ball.y), 'att_s':", "for name, group in df_t_grouped: game_id, play_id = name #", "game_id, play_id, frameId = name if len(group) < GROUP_SIZE_MINIMUM: continue", "be skipped.') continue p_n_b_indices = 
get_players_and_ball_indices(group, p1, p2) if p_n_b_indices:", "'pass_outcome_caught': check_group_event(group_events, 'pass_outcome_caught'), 'tackle': check_group_event(group_events, 'tackle'), 'first_contact': check_group_event(group_events, 'first_contact'), 'pass_outcome_incomplete':", "group_row.isDefensivePI } calc_df = calc_df.append( dict_to_append, ignore_index=True ) print(\"Saving data...\")", "calc_df.append( dict_to_append, ignore_index=True ) print(\"Saving data...\") calc_df.to_csv( data_v3.get_step1_end_path(week_num), index=False )", "group in gb: game_id, play_id = name try: event_3rd =", "calculate_distance, \\ normalize_according_to_play_direction, check_group_event from src.features.helpers.processing_v4 import home_has_possession, calculate_team_sitation week_num", "try: p1, p2 = get_closest_players(player_situation_df, ball_row.x.item(), ball_row.y.item()) except ValueError: print('Value", "remove_start = group.index.min() df_t_v3_s = df_t_v3_s.drop(df_t_v3_s.loc[remove_start:pass_forward_frame_id].index) pd.options.mode.chained_assignment = None gb", "group_row.time, 'att_def_d': calculate_distance(p_att.x, p_att.y, p_def.x, p_def.y), 'att_ball_d': calculate_distance(p_att.x, p_att.y, ball.x,", "save_file_exists = False if not save_file_exists: print(\"Started loading data\") play_df", "loading data\") play_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'plays.csv')) games_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'games.csv'))", "name, group in df_t_grouped: game_id, play_id = name pass_forward_frame_id =", "== 'defending'].head(1).squeeze() group_row = group.head(1).squeeze() group_events = group.event.unique().tolist() dict_to_append =", "p1, p2) if p_n_b_indices: keep_indices.extend(p_n_b_indices) clean_df = df_t_v3_s[df_t_v3_s.index.isin(keep_indices)] clean_df.to_csv( save_file_path,", "index=False ) print('Normalize...') clean_df = normalize_according_to_play_direction(clean_df) clean_df['homeHasPossession'] = clean_df.apply( 
lambda", "name, group in gb: game_id, play_id = name try: event_3rd", "p_def.o, 'att_dir': p_att.dir, 'def_dir': p_def.dir, 'event': group_row.event, 'pass_arrived': check_group_event(group_events, 'pass_arrived'),", "import home_has_possession, calculate_team_sitation week_num = int(sys.argv[1]) data_v3 = DataV3(DATA_V3_SUBVERSION) save_file_path", "!= game_id) | (df_t_v3.playId != play_id)] df_t_v3_s = df_t_v3.sort_values(by=['gameId', 'playId',", "ball.x, ball.y), 'att_s': p_att.s, 'def_s': p_def.s, 'ball_s': ball.s, 'att_a': p_att.a,", "'def_o', 'att_dir', 'def_dir', 'event', 'gameId', 'playId', 'frameId', 'isDefensivePI' ] )", "] ) GROUP_SIZE_MINIMUM = 3 for name, group in gb_2:", "lambda row: home_has_possession(row), axis=1 ) clean_df['teamSituation'] = clean_df.apply( lambda row:", "events without 'pass_forward' df_t_grouped = df_t.groupby(['gameId', 'playId']) df_t_v3 = df_t.copy().sort_index()", "= df_t_v3_s.groupby(['gameId', 'playId']) # remove all values before 'pass_forward' print(\"Removing", "p_n_b_indices = get_players_and_ball_indices(group, p1, p2) if p_n_b_indices: keep_indices.extend(p_n_b_indices) clean_df =", "p_def.y), 'att_ball_d': calculate_distance(p_att.x, p_att.y, ball.x, ball.y), 'def_ball_d': calculate_distance(p_def.x, p_def.y, ball.x,", "'att_o', 'def_o', 'att_dir', 'def_dir', 'event', 'gameId', 'playId', 'frameId', 'isDefensivePI' ]", "'ball_s', 'att_o', 'def_o', 'att_dir', 'def_dir', 'event', 'gameId', 'playId', 'frameId', 'isDefensivePI'", "= group.head(1).squeeze() group_events = group.event.unique().tolist() dict_to_append = { 'time': group_row.time,", "'att_ball_d': calculate_distance(p_att.x, p_att.y, ball.x, ball.y), 'def_ball_d': calculate_distance(p_def.x, p_def.y, ball.x, ball.y),", "game_id, play_id = name # if group does not contain", "for name, group in df_t_grouped: game_id, play_id = name pass_forward_frame_id", "situation_df = group[group.event == event_3rd] # convert dataframe into series", "== 
'pass_forward'].index.min() - 1 remove_start = group.index.min() df_t_v3_s = df_t_v3_s.drop(df_t_v3_s.loc[remove_start:pass_forward_frame_id].index)", "ball_row.x.item(), ball_row.y.item()) except ValueError: print('Value Error raised. This group will", "play_id = name # if group does not contain pass", "= name try: event_3rd = group.event.unique()[2] except IndexError: print('Number of", "add_missing_timestamp_values from src.features.helpers.processing_v3 import get_closest_players, get_players_and_ball_indices, calculate_distance, \\ normalize_according_to_play_direction, check_group_event", "= pd.DataFrame( columns=[ 'time', 'att_def_d', 'att_ball_d', 'def_ball_d', 'att_s', 'def_s', 'ball_s',", "get_players_and_ball_indices, calculate_distance, \\ normalize_according_to_play_direction, check_group_event from src.features.helpers.processing_v4 import home_has_possession, calculate_team_sitation", "ball.s, 'att_a': p_att.a, 'def_a': p_def.a, 'ball_a': ball.a, 'att_o': p_att.o, 'def_o':", "'pass_arrived': check_group_event(group_events, 'pass_arrived'), 'pass_outcome_caught': check_group_event(group_events, 'pass_outcome_caught'), 'tackle': check_group_event(group_events, 'tackle'), 'first_contact':", "= DataV3(DATA_V3_SUBVERSION) save_file_path = data_v3.get_step1_checkpoint_path(week_num) try: clean_df = pd.read_csv(save_file_path) save_file_exists", "group[group.teamSituation == 'defending'].head(1).squeeze() group_row = group.head(1).squeeze() group_events = group.event.unique().tolist() dict_to_append", "df_t_v3_s = df_t_v3_s.drop(df_t_v3_s.loc[remove_start:pass_forward_frame_id].index) pd.options.mode.chained_assignment = None gb = df_t_v3_s.groupby(['gameId', 'playId'])", "name pass_forward_frame_id = group[group.event == 'pass_forward'].index.min() - 1 remove_start =", "RAW_DATA_DIR, DataV3, DATA_V3_SUBVERSION from src.features.helpers.processing import add_missing_timestamp_values from src.features.helpers.processing_v3 import", "= 
group.event.unique().tolist() dict_to_append = { 'time': group_row.time, 'att_def_d': calculate_distance(p_att.x, p_att.y,", "p_att.o, 'def_o': p_def.o, 'att_dir': p_att.dir, 'def_dir': p_def.dir, 'event': group_row.event, 'pass_arrived':", "df_t_grouped: game_id, play_id = name pass_forward_frame_id = group[group.event == 'pass_forward'].index.min()", "how='left', on=['gameId', 'playId']) # Remove all events without 'pass_forward' df_t_grouped", "# convert dataframe into series ball_row = situation_df[situation_df.team == 'football'].head(1)", "clean_df['homeHasPossession'] = clean_df.apply( lambda row: home_has_possession(row), axis=1 ) clean_df['teamSituation'] =", "p_def.y, ball.x, ball.y), 'att_s': p_att.s, 'def_s': p_def.s, 'ball_s': ball.s, 'att_a':", "df_t_v3.sort_values(by=['gameId', 'playId', 'time', 'event']) df_t_v3_s = df_t_v3_s.reset_index(drop=True) df_t_grouped = df_t_v3_s.groupby(['gameId',", "Start processing timestamps\") tracking_df = add_missing_timestamp_values(tracking_df) games_n_plays_df = play_df.merge(week_and_games, how='inner',", "'event': group_row.event, 'pass_arrived': check_group_event(group_events, 'pass_arrived'), 'pass_outcome_caught': check_group_event(group_events, 'pass_outcome_caught'), 'tackle': check_group_event(group_events,", "src.features.helpers.processing import add_missing_timestamp_values from src.features.helpers.processing_v3 import get_closest_players, get_players_and_ball_indices, calculate_distance, \\", "player_situation_df = situation_df[situation_df.team != 'football'] try: p1, p2 = get_closest_players(player_situation_df,", "(df_t_v3.playId != play_id)] df_t_v3_s = df_t_v3.sort_values(by=['gameId', 'playId', 'time', 'event']) df_t_v3_s", "== 'football'].head(1).squeeze() p_att = group[group.teamSituation == 'attacking'].head(1).squeeze() p_def = group[group.teamSituation", "= pd.read_csv(os.path.join(RAW_DATA_DIR, 'plays.csv')) games_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'games.csv')) week_and_games = 
games_df[games_df.week", "remove all values before 'pass_forward' print(\"Removing all values before pass", "df_t_grouped: game_id, play_id = name # if group does not", "p_att.dir, 'def_dir': p_def.dir, 'event': group_row.event, 'pass_arrived': check_group_event(group_events, 'pass_arrived'), 'pass_outcome_caught': check_group_event(group_events,", "'y', 's', 'o', 'dir', 'event', 'team', 'gameId', 'playId', 'frameId', 'isDefensivePI'", "if group does not contain pass forward, drop it if", "df_t.copy().sort_index() for name, group in df_t_grouped: game_id, play_id = name", "if not save_file_exists: print(\"Started loading data\") play_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'plays.csv'))", "= group.event.unique()[2] except IndexError: print('Number of events is < 3,", "'playId': group_row.playId, 'frameId': group_row.frameId, 'isDefensivePI': group_row.isDefensivePI } calc_df = calc_df.append(", "all values before 'pass_forward' print(\"Removing all values before pass forward", "= df_t_v3[(df_t_v3.gameId != game_id) | (df_t_v3.playId != play_id)] df_t_v3_s =", ") GROUP_SIZE_MINIMUM = 3 for name, group in gb_2: game_id,", "= get_players_and_ball_indices(group, p1, p2) if p_n_b_indices: keep_indices.extend(p_n_b_indices) clean_df = df_t_v3_s[df_t_v3_s.index.isin(keep_indices)]", ") print(\"Saving data...\") calc_df.to_csv( data_v3.get_step1_end_path(week_num), index=False ) print(f'End time: {datetime.now().strftime(\"%H:%M:%S\")}')", "pass forward, drop it if all(group.event != 'pass_forward'): df_t_v3 =", "clean_df = pd.read_csv(save_file_path) save_file_exists = True except FileNotFoundError: save_file_exists =", "check_group_event(group_events, 'pass_arrived'), 'pass_outcome_caught': check_group_event(group_events, 'pass_outcome_caught'), 'tackle': check_group_event(group_events, 'tackle'), 'first_contact': check_group_event(group_events,", "= None gb = df_t_v3_s.groupby(['gameId', 'playId']) print('Getting closest players...') keep_indices", "play_id = name 
try: event_3rd = group.event.unique()[2] except IndexError: print('Number", "'event', 'gameId', 'playId', 'frameId', 'isDefensivePI' ] ) GROUP_SIZE_MINIMUM = 3", "p_def.x, p_def.y), 'att_ball_d': calculate_distance(p_att.x, p_att.y, ball.x, ball.y), 'def_ball_d': calculate_distance(p_def.x, p_def.y,", "calculate_distance(p_def.x, p_def.y, ball.x, ball.y), 'att_s': p_att.s, 'def_s': p_def.s, 'ball_s': ball.s,", "'pass_outcome_incomplete'), 'out_of_bounds': check_group_event(group_events, 'out_of_bounds'), 'week': week_num, 'gameId': group_row.gameId, 'playId': group_row.playId,", "play_id)] df_t_v3_s = df_t_v3.sort_values(by=['gameId', 'playId', 'time', 'event']) df_t_v3_s = df_t_v3_s.reset_index(drop=True)", "will be skipped.') continue p_n_b_indices = get_players_and_ball_indices(group, p1, p2) if", "print(\"Data loaded. Start processing timestamps\") tracking_df = add_missing_timestamp_values(tracking_df) games_n_plays_df =", "group_row.gameId, 'playId': group_row.playId, 'frameId': group_row.frameId, 'isDefensivePI': group_row.isDefensivePI } calc_df =", "= group.index.min() df_t_v3_s = df_t_v3_s.drop(df_t_v3_s.loc[remove_start:pass_forward_frame_id].index) pd.options.mode.chained_assignment = None gb =", "= df_t_v3_s.drop(df_t_v3_s.loc[remove_start:pass_forward_frame_id].index) pd.options.mode.chained_assignment = None gb = df_t_v3_s.groupby(['gameId', 'playId']) print('Getting", "it if all(group.event != 'pass_forward'): df_t_v3 = df_t_v3[(df_t_v3.gameId != game_id)", "add_missing_timestamp_values(tracking_df) games_n_plays_df = play_df.merge(week_and_games, how='inner', on='gameId') m_grouped = games_n_plays_df.groupby(['gameId', 'playId'])", "= play_df.merge(week_and_games, how='inner', on='gameId') m_grouped = games_n_plays_df.groupby(['gameId', 'playId']) df_t =", ") clean_df['teamSituation'] = clean_df.apply( lambda row: calculate_team_sitation(row), axis=1 ) print('Creating", "'out_of_bounds': check_group_event(group_events, 'out_of_bounds'), 'week': 
week_num, 'gameId': group_row.gameId, 'playId': group_row.playId, 'frameId':", "lambda row: calculate_team_sitation(row), axis=1 ) print('Creating features...') min_df = clean_df[[", "IndexError: print('Number of events is < 3, skipping...') continue situation_df", "'o', 'dir', 'event', 'team', 'gameId', 'playId', 'frameId', 'isDefensivePI' ]] gb_2", "week_and_games = games_df[games_df.week == week_num] tracking_df = pd.read_csv(os.path.join(RAW_DATA_DIR, f'week{week_num}.csv')) print(\"Data", "loaded. Start processing timestamps\") tracking_df = add_missing_timestamp_values(tracking_df) games_n_plays_df = play_df.merge(week_and_games,", "check_group_event(group_events, 'out_of_bounds'), 'week': week_num, 'gameId': group_row.gameId, 'playId': group_row.playId, 'frameId': group_row.frameId,", "before 'pass_forward' print(\"Removing all values before pass forward event...\") for", "= False if not save_file_exists: print(\"Started loading data\") play_df =", "save_file_exists = True except FileNotFoundError: save_file_exists = False if not", "'games.csv')) week_and_games = games_df[games_df.week == week_num] tracking_df = pd.read_csv(os.path.join(RAW_DATA_DIR, f'week{week_num}.csv'))", "'playId']) print('Getting closest players...') keep_indices = [] for name, group", "'att_s', 'def_s', 'ball_s', 'att_o', 'def_o', 'att_dir', 'def_dir', 'event', 'gameId', 'playId',", "True except FileNotFoundError: save_file_exists = False if not save_file_exists: print(\"Started", "} calc_df = calc_df.append( dict_to_append, ignore_index=True ) print(\"Saving data...\") calc_df.to_csv(", "= games_df[games_df.week == week_num] tracking_df = pd.read_csv(os.path.join(RAW_DATA_DIR, f'week{week_num}.csv')) print(\"Data loaded.", "'pass_outcome_incomplete': check_group_event(group_events, 'pass_outcome_incomplete'), 'out_of_bounds': check_group_event(group_events, 'out_of_bounds'), 'week': week_num, 'gameId': group_row.gameId,", "group[group.teamSituation == 'football'].head(1).squeeze() 
p_att = group[group.teamSituation == 'attacking'].head(1).squeeze() p_def =", "= data_v3.get_step1_checkpoint_path(week_num) try: clean_df = pd.read_csv(save_file_path) save_file_exists = True except", "# Remove all events without 'pass_forward' df_t_grouped = df_t.groupby(['gameId', 'playId'])", "'ball_s': ball.s, 'att_a': p_att.a, 'def_a': p_def.a, 'ball_a': ball.a, 'att_o': p_att.o,", "calc_df = calc_df.append( dict_to_append, ignore_index=True ) print(\"Saving data...\") calc_df.to_csv( data_v3.get_step1_end_path(week_num),", "name, group in df_t_grouped: game_id, play_id = name # if", "calculate_team_sitation(row), axis=1 ) print('Creating features...') min_df = clean_df[[ 'time', 'x',", "p_def = group[group.teamSituation == 'defending'].head(1).squeeze() group_row = group.head(1).squeeze() group_events =", "[] for name, group in gb: game_id, play_id = name", "ball player_situation_df = situation_df[situation_df.team != 'football'] try: p1, p2 =", "forward event...\") for name, group in df_t_grouped: game_id, play_id =", "save_file_path = data_v3.get_step1_checkpoint_path(week_num) try: clean_df = pd.read_csv(save_file_path) save_file_exists = True", "remove ball player_situation_df = situation_df[situation_df.team != 'football'] try: p1, p2", "group[group.event == 'pass_forward'].index.min() - 1 remove_start = group.index.min() df_t_v3_s =", "tracking_df = pd.read_csv(os.path.join(RAW_DATA_DIR, f'week{week_num}.csv')) print(\"Data loaded. Start processing timestamps\") tracking_df", "orientation are NaN calc_df = pd.DataFrame( columns=[ 'time', 'att_def_d', 'att_ball_d',", "= calc_df.append( dict_to_append, ignore_index=True ) print(\"Saving data...\") calc_df.to_csv( data_v3.get_step1_end_path(week_num), index=False" ]
[ "= re.compile(r\"^\\#\\s+(\\d+)\\s+\\\"(\\S+)\\\".*$\") for line in lines: m = matcher.match(line) if", "import getopt import os import re import sys import script_utils", "global flag_reverse try: optlist, _ = getopt.getopt(sys.argv[1:], \"dr\") except getopt.GetoptError", "m = matcher.match(line) if m: lnum = int(m.group(1)) afile =", "lines = sys.stdin.readlines() lnum = -1 matcher = re.compile(r\"^\\#\\s+(\\d+)\\s+\\\"(\\S+)\\\".*$\") for", "exit.\"\"\" if msgarg: sys.stderr.write(\"error: %s\\n\" % msgarg) print \"\"\"\\ usage:", "Read lines = sys.stdin.readlines() lnum = -1 matcher = re.compile(r\"^\\#\\s+(\\d+)\\s+\\\"(\\S+)\\\".*$\")", "lnum = -1 matcher = re.compile(r\"^\\#\\s+(\\d+)\\s+\\\"(\\S+)\\\".*$\") for line in lines:", "os.path.basename(sys.argv[0]) sys.exit(1) def parse_args(): \"\"\"Command line argument parsing.\"\"\" global flag_reverse", "import re import sys import script_utils as u flag_reverse =", "flag_reverse try: optlist, _ = getopt.getopt(sys.argv[1:], \"dr\") except getopt.GetoptError as", "line in lines: m = matcher.match(line) if m: lnum =", "flag_reverse = False # Setup u.setdeflanglocale() parse_args() # Read lines", "argument parsing.\"\"\" global flag_reverse try: optlist, _ = getopt.getopt(sys.argv[1:], \"dr\")", "as err: # unrecognized option usage(str(err)) for opt, _ in", "print \"\"\"\\ usage: %s [options] < input > output options:", "%s\\n\" % msgarg) print \"\"\"\\ usage: %s [options] < input", "matcher = re.compile(r\"^\\#\\s+(\\d+)\\s+\\\"(\\S+)\\\".*$\") for line in lines: m = matcher.match(line)", "and exit.\"\"\" if msgarg: sys.stderr.write(\"error: %s\\n\" % msgarg) print \"\"\"\\", "Read std input, then annotate each line with line number", "int(m.group(1)) afile = m.group(2) print \"<%s:%d>\" % (afile, lnum) continue", "debugging. 
\"\"\" import getopt import os import re import sys", "try: optlist, _ = getopt.getopt(sys.argv[1:], \"dr\") except getopt.GetoptError as err:", "unrecognized option usage(str(err)) for opt, _ in optlist: if opt", "Setup u.setdeflanglocale() parse_args() # Read lines = sys.stdin.readlines() lnum =", "output. Useful in the context of compiler debugging. \"\"\" import", "option usage(str(err)) for opt, _ in optlist: if opt ==", "context of compiler debugging. \"\"\" import getopt import os import", "flag_reverse = True def usage(msgarg): \"\"\"Print usage and exit.\"\"\" if", "msgarg) print \"\"\"\\ usage: %s [options] < input > output", "sys.stdin.readlines() lnum = -1 matcher = re.compile(r\"^\\#\\s+(\\d+)\\s+\\\"(\\S+)\\\".*$\") for line in", "(afile, lnum) continue print \"%d:%s\" % (lnum, line.strip()) lnum +=", "= -1 matcher = re.compile(r\"^\\#\\s+(\\d+)\\s+\\\"(\\S+)\\\".*$\") for line in lines: m", "input, then annotate each line with line number based on", "each line with line number based on previous expanded line", "parse_args() # Read lines = sys.stdin.readlines() lnum = -1 matcher", "source input with line numbers. Read std input, then annotate", "m: lnum = int(m.group(1)) afile = m.group(2) print \"<%s:%d>\" %", "def parse_args(): \"\"\"Command line argument parsing.\"\"\" global flag_reverse try: optlist,", "if opt == \"-d\": u.increment_verbosity() elif opt == \"-r\": flag_reverse", "std input, then annotate each line with line number based", "False # Setup u.setdeflanglocale() parse_args() # Read lines = sys.stdin.readlines()", "print \"<%s:%d>\" % (afile, lnum) continue print \"%d:%s\" % (lnum,", "= False # Setup u.setdeflanglocale() parse_args() # Read lines =", "re import sys import script_utils as u flag_reverse = True", "of compiler debugging. 
\"\"\" import getopt import os import re", "level \"\"\" % os.path.basename(sys.argv[0]) sys.exit(1) def parse_args(): \"\"\"Command line argument", "< input > output options: -d increase debug msg verbosity", "getopt.GetoptError as err: # unrecognized option usage(str(err)) for opt, _", "script_utils as u flag_reverse = True def usage(msgarg): \"\"\"Print usage", "True def usage(msgarg): \"\"\"Print usage and exit.\"\"\" if msgarg: sys.stderr.write(\"error:", "elif opt == \"-r\": flag_reverse = False # Setup u.setdeflanglocale()", "import script_utils as u flag_reverse = True def usage(msgarg): \"\"\"Print", "opt, _ in optlist: if opt == \"-d\": u.increment_verbosity() elif", "on previous expanded line directives from -E output. Useful in", "== \"-r\": flag_reverse = False # Setup u.setdeflanglocale() parse_args() #", "debug msg verbosity level \"\"\" % os.path.basename(sys.argv[0]) sys.exit(1) def parse_args():", "verbosity level \"\"\" % os.path.basename(sys.argv[0]) sys.exit(1) def parse_args(): \"\"\"Command line", "previous expanded line directives from -E output. Useful in the", "getopt.getopt(sys.argv[1:], \"dr\") except getopt.GetoptError as err: # unrecognized option usage(str(err))", "msgarg: sys.stderr.write(\"error: %s\\n\" % msgarg) print \"\"\"\\ usage: %s [options]", "line with line number based on previous expanded line directives", "parsing.\"\"\" global flag_reverse try: optlist, _ = getopt.getopt(sys.argv[1:], \"dr\") except", "increase debug msg verbosity level \"\"\" % os.path.basename(sys.argv[0]) sys.exit(1) def", "if m: lnum = int(m.group(1)) afile = m.group(2) print \"<%s:%d>\"", "compiler debugging. \"\"\" import getopt import os import re import", "# Setup u.setdeflanglocale() parse_args() # Read lines = sys.stdin.readlines() lnum", "with line numbers. 
Read std input, then annotate each line", "= int(m.group(1)) afile = m.group(2) print \"<%s:%d>\" % (afile, lnum)", "optlist, _ = getopt.getopt(sys.argv[1:], \"dr\") except getopt.GetoptError as err: #", "from -E output. Useful in the context of compiler debugging.", "-E preprocessed source input with line numbers. Read std input,", "\"-r\": flag_reverse = False # Setup u.setdeflanglocale() parse_args() # Read", "matcher.match(line) if m: lnum = int(m.group(1)) afile = m.group(2) print", "\"-d\": u.increment_verbosity() elif opt == \"-r\": flag_reverse = False #", "err: # unrecognized option usage(str(err)) for opt, _ in optlist:", "in optlist: if opt == \"-d\": u.increment_verbosity() elif opt ==", "-1 matcher = re.compile(r\"^\\#\\s+(\\d+)\\s+\\\"(\\S+)\\\".*$\") for line in lines: m =", "msg verbosity level \"\"\" % os.path.basename(sys.argv[0]) sys.exit(1) def parse_args(): \"\"\"Command", "input > output options: -d increase debug msg verbosity level", "getopt import os import re import sys import script_utils as", "def usage(msgarg): \"\"\"Print usage and exit.\"\"\" if msgarg: sys.stderr.write(\"error: %s\\n\"", "# Read lines = sys.stdin.readlines() lnum = -1 matcher =", "== \"-d\": u.increment_verbosity() elif opt == \"-r\": flag_reverse = False", "#!/usr/bin/python \"\"\"Annotates -E preprocessed source input with line numbers. Read", "-d increase debug msg verbosity level \"\"\" % os.path.basename(sys.argv[0]) sys.exit(1)", "opt == \"-d\": u.increment_verbosity() elif opt == \"-r\": flag_reverse =", "preprocessed source input with line numbers. Read std input, then", "line numbers. Read std input, then annotate each line with", "Useful in the context of compiler debugging. \"\"\" import getopt", "m.group(2) print \"<%s:%d>\" % (afile, lnum) continue print \"%d:%s\" %", "with line number based on previous expanded line directives from", "lnum) continue print \"%d:%s\" % (lnum, line.strip()) lnum += 1", "input with line numbers. 
Read std input, then annotate each", "output options: -d increase debug msg verbosity level \"\"\" %", "% (afile, lnum) continue print \"%d:%s\" % (lnum, line.strip()) lnum", "annotate each line with line number based on previous expanded", "%s [options] < input > output options: -d increase debug", "[options] < input > output options: -d increase debug msg", "in lines: m = matcher.match(line) if m: lnum = int(m.group(1))", "sys import script_utils as u flag_reverse = True def usage(msgarg):", "usage: %s [options] < input > output options: -d increase", "number based on previous expanded line directives from -E output.", "parse_args(): \"\"\"Command line argument parsing.\"\"\" global flag_reverse try: optlist, _", "line number based on previous expanded line directives from -E", "% msgarg) print \"\"\"\\ usage: %s [options] < input >", "if msgarg: sys.stderr.write(\"error: %s\\n\" % msgarg) print \"\"\"\\ usage: %s", "directives from -E output. Useful in the context of compiler", "import os import re import sys import script_utils as u", "except getopt.GetoptError as err: # unrecognized option usage(str(err)) for opt,", "= m.group(2) print \"<%s:%d>\" % (afile, lnum) continue print \"%d:%s\"", "afile = m.group(2) print \"<%s:%d>\" % (afile, lnum) continue print", "usage and exit.\"\"\" if msgarg: sys.stderr.write(\"error: %s\\n\" % msgarg) print", "usage(msgarg): \"\"\"Print usage and exit.\"\"\" if msgarg: sys.stderr.write(\"error: %s\\n\" %", "sys.exit(1) def parse_args(): \"\"\"Command line argument parsing.\"\"\" global flag_reverse try:", "\"\"\"Command line argument parsing.\"\"\" global flag_reverse try: optlist, _ =", "expanded line directives from -E output. 
Useful in the context", "= sys.stdin.readlines() lnum = -1 matcher = re.compile(r\"^\\#\\s+(\\d+)\\s+\\\"(\\S+)\\\".*$\") for line", "opt == \"-r\": flag_reverse = False # Setup u.setdeflanglocale() parse_args()", "u.setdeflanglocale() parse_args() # Read lines = sys.stdin.readlines() lnum = -1", "\"\"\"Annotates -E preprocessed source input with line numbers. Read std", "numbers. Read std input, then annotate each line with line", "for opt, _ in optlist: if opt == \"-d\": u.increment_verbosity()", "% os.path.basename(sys.argv[0]) sys.exit(1) def parse_args(): \"\"\"Command line argument parsing.\"\"\" global", "= True def usage(msgarg): \"\"\"Print usage and exit.\"\"\" if msgarg:", "_ = getopt.getopt(sys.argv[1:], \"dr\") except getopt.GetoptError as err: # unrecognized", "u flag_reverse = True def usage(msgarg): \"\"\"Print usage and exit.\"\"\"", "\"<%s:%d>\" % (afile, lnum) continue print \"%d:%s\" % (lnum, line.strip())", "for line in lines: m = matcher.match(line) if m: lnum", "\"dr\") except getopt.GetoptError as err: # unrecognized option usage(str(err)) for", "optlist: if opt == \"-d\": u.increment_verbosity() elif opt == \"-r\":", "os import re import sys import script_utils as u flag_reverse", "options: -d increase debug msg verbosity level \"\"\" % os.path.basename(sys.argv[0])", "# unrecognized option usage(str(err)) for opt, _ in optlist: if", "then annotate each line with line number based on previous", "lnum = int(m.group(1)) afile = m.group(2) print \"<%s:%d>\" % (afile,", "\"\"\"Print usage and exit.\"\"\" if msgarg: sys.stderr.write(\"error: %s\\n\" % msgarg)", "_ in optlist: if opt == \"-d\": u.increment_verbosity() elif opt", "-E output. Useful in the context of compiler debugging. \"\"\"", "line directives from -E output. Useful in the context of", "based on previous expanded line directives from -E output. Useful", "in the context of compiler debugging. 
\"\"\" import getopt import", "import sys import script_utils as u flag_reverse = True def", "line argument parsing.\"\"\" global flag_reverse try: optlist, _ = getopt.getopt(sys.argv[1:],", "\"\"\" import getopt import os import re import sys import", "usage(str(err)) for opt, _ in optlist: if opt == \"-d\":", "> output options: -d increase debug msg verbosity level \"\"\"", "re.compile(r\"^\\#\\s+(\\d+)\\s+\\\"(\\S+)\\\".*$\") for line in lines: m = matcher.match(line) if m:", "= matcher.match(line) if m: lnum = int(m.group(1)) afile = m.group(2)", "\"\"\" % os.path.basename(sys.argv[0]) sys.exit(1) def parse_args(): \"\"\"Command line argument parsing.\"\"\"", "u.increment_verbosity() elif opt == \"-r\": flag_reverse = False # Setup", "sys.stderr.write(\"error: %s\\n\" % msgarg) print \"\"\"\\ usage: %s [options] <", "the context of compiler debugging. \"\"\" import getopt import os", "\"\"\"\\ usage: %s [options] < input > output options: -d", "as u flag_reverse = True def usage(msgarg): \"\"\"Print usage and", "= getopt.getopt(sys.argv[1:], \"dr\") except getopt.GetoptError as err: # unrecognized option", "lines: m = matcher.match(line) if m: lnum = int(m.group(1)) afile" ]
[ "dam occupancy rate of İstanbul using these features.\", unsafe_allow_html=True) LSTM_model_name", "255)',plot_bgcolor = 'rgb(248, 248, 255)',barmode = \"stack\", xaxis = dict(title=\"Time\",", "RNNs fail. It has been so designed that thevanishing gradient", "import tensorflow as tf import numpy from utils.get_owm_data import get_open_weather_map_data", "px def app(): st.title(\"LSTM Model\") st.subheader('What does LSTM model do?')", "tensorflow as tf import numpy from utils.get_owm_data import get_open_weather_map_data from", "dict(title=\"Time\", linecolor=\"#BCCCDC\",showspikes=True,spikethickness=2,spikedash=\"dot\",spikecolor= \"#ffffff\",spikemode=\"across\",), yaxis= dict(title=\"Dam Occupancy Rate (%)\",linecolor=\"#021C1E\")) line_chart= go.Scatter(x=date_list,", "import numpy from utils.get_owm_data import get_open_weather_map_data from utils.get_date import get_date_list_for_gmt", "bridged using LSTMs where they also handle noise, distributed representations,", "y=prediction_lstm, marker_color='rgb(0, 200, 200)' ) data.append(line_chart) fig= go.Figure(data=data, layout=layout) st.plotly_chart(fig)", "data from [OpenWeatherAPI](https://openweathermap.org/api). Model input features are *Rain*, *MaxTemp*, *MinTemp*,", "import tools import plotly.offline as py import plotly.express as px", "removed, while the training model is left unaltered. Long-time lags", "justify;'>LSTM is well-suited to classify, process and predict time series", "fail. It has been so designed that thevanishing gradient problem", "can store previous timestep information and this is how it", "*MinTemp*, *AvgWind*, *AvgHumidity* and *AvgPressure*. Model predicts 7 days dam", "unaltered. Long-time lags in certain problems are bridged using LSTMs", "occupancy rate of İstanbul using these features.\", unsafe_allow_html=True) LSTM_model_name =", "*AvgWind*, *AvgHumidity* and *AvgPressure*. 
Model predicts 7 days dam occupancy", "model do?') st.markdown(\"\"\"<p style='text-align: justify;'>LSTM networks are an extension of", "get_open_weather_map_data from utils.get_date import get_date_list_for_gmt import plotly.graph_objects as go from", "has been so designed that thevanishing gradient problem is almost", "while the training model is left unaltered. Long-time lags in", "justify;'>LSTM networks are an extension of recurrent neural networks (RNNs)", "from utils.get_date import get_date_list_for_gmt import plotly.graph_objects as go from plotly", "as px def app(): st.title(\"LSTM Model\") st.subheader('What does LSTM model", "\"stack\", xaxis = dict(title=\"Time\", linecolor=\"#BCCCDC\",showspikes=True,spikethickness=2,spikedash=\"dot\",spikecolor= \"#ffffff\",spikemode=\"across\",), yaxis= dict(title=\"Dam Occupancy Rate", "and predict time series given time lags of unknown duration.", "unsafe_allow_html=True) st.subheader('Why we chose LSTM?') st.markdown(\"\"\"<p style='text-align: justify;'>LSTM is well-suited", "Occupancy Rate (%)\",linecolor=\"#021C1E\")) line_chart= go.Scatter(x=date_list, y=prediction_lstm, marker_color='rgb(0, 200, 200)' )", "import plotly.express as px def app(): st.title(\"LSTM Model\") st.subheader('What does", "we chose LSTM?') st.markdown(\"\"\"<p style='text-align: justify;'>LSTM is well-suited to classify,", "st.markdown(\"\"\"<p style='text-align: justify;'>LSTM is well-suited to classify, process and predict", "as py import plotly.express as px def app(): st.title(\"LSTM Model\")", "gives an advantage to LSTM over alternative RNNs, hidden Markov", "data = [] layout = go.Layout( title= \"<b>LSTM Dam Occupancy", "import plotly.offline as py import plotly.express as px def app():", "left unaltered. 
Long-time lags in certain problems are bridged using", "Model\") st.subheader('What does LSTM model do?') st.markdown(\"\"\"<p style='text-align: justify;'>LSTM networks", "weather data from [OpenWeatherAPI](https://openweathermap.org/api). Model input features are *Rain*, *MaxTemp*,", "model_lstm = tf.keras.models.load_model(LSTM_model_name) features = get_open_weather_map_data() prediction_lstm = model_lstm.predict(features) *", "100 prediction_lstm = prediction_lstm.ravel() date_list = get_date_list_for_gmt() data = []", "= 'rgb(248, 248, 255)',barmode = \"stack\", xaxis = dict(title=\"Time\", linecolor=\"#BCCCDC\",showspikes=True,spikethickness=2,spikedash=\"dot\",spikecolor=", "= dict(title=\"Time\", linecolor=\"#BCCCDC\",showspikes=True,spikethickness=2,spikedash=\"dot\",spikecolor= \"#ffffff\",spikemode=\"across\",), yaxis= dict(title=\"Dam Occupancy Rate (%)\",linecolor=\"#021C1E\")) line_chart=", "is almost completely removed, while the training model is left", "= prediction_lstm.ravel() date_list = get_date_list_for_gmt() data = [] layout =", "'rgb(248, 248, 255)',plot_bgcolor = 'rgb(248, 248, 255)',barmode = \"stack\", xaxis", "line_chart= go.Scatter(x=date_list, y=prediction_lstm, marker_color='rgb(0, 200, 200)' ) data.append(line_chart) fig= go.Figure(data=data,", "time series given time lags of unknown duration. Relative insensitivity", "duration. Relative insensitivity to gap length gives an advantage to", "and *AvgPressure*. Model predicts 7 days dam occupancy rate of", "and other sequence learningmethods. In addition, LSTM works great because", "lags of unknown duration. Relative insensitivity to gap length gives", "this is how it learns.</p>\"\"\", unsafe_allow_html=True) st.subheader('LSTM model input and", "is well-suited to classify, process and predict time series given", "models and other sequence learningmethods. 
In addition, LSTM works great", "unsafe_allow_html=True) st.subheader('LSTM model input and output') st.markdown(\"Model input is 7", "st import tensorflow as tf import numpy from utils.get_owm_data import", "get_open_weather_map_data() prediction_lstm = model_lstm.predict(features) * 100 prediction_lstm = prediction_lstm.ravel() date_list", "LSTM works great because LSTM cells have a memory that", "noise, distributed representations, and continuous values.</p>\"\"\", unsafe_allow_html=True) st.subheader('Why we chose", "Dam Occupancy Forecasting Plot</b>\",paper_bgcolor = 'rgb(248, 248, 255)',plot_bgcolor = 'rgb(248,", "Occupancy Forecasting Plot</b>\",paper_bgcolor = 'rgb(248, 248, 255)',plot_bgcolor = 'rgb(248, 248,", "gap length gives an advantage to LSTM over alternative RNNs,", "from [OpenWeatherAPI](https://openweathermap.org/api). Model input features are *Rain*, *MaxTemp*, *MinTemp*, *AvgWind*,", "input features are *Rain*, *MaxTemp*, *MinTemp*, *AvgWind*, *AvgHumidity* and *AvgPressure*.", "(RNNs) mainly introduced to handle situations where RNNs fail. It", "is 7 days daily weather data from [OpenWeatherAPI](https://openweathermap.org/api). 
Model input", "Model predicts 7 days dam occupancy rate of İstanbul using", "'rgb(248, 248, 255)',barmode = \"stack\", xaxis = dict(title=\"Time\", linecolor=\"#BCCCDC\",showspikes=True,spikethickness=2,spikedash=\"dot\",spikecolor= \"#ffffff\",spikemode=\"across\",),", "tf import numpy from utils.get_owm_data import get_open_weather_map_data from utils.get_date import", "= 'models/LSTM_model.h5' model_lstm = tf.keras.models.load_model(LSTM_model_name) features = get_open_weather_map_data() prediction_lstm =", "timestep information and this is how it learns.</p>\"\"\", unsafe_allow_html=True) st.subheader('LSTM", "insensitivity to gap length gives an advantage to LSTM over", "networks are an extension of recurrent neural networks (RNNs) mainly", "it learns.</p>\"\"\", unsafe_allow_html=True) st.subheader('LSTM model input and output') st.markdown(\"Model input", "using these features.\", unsafe_allow_html=True) LSTM_model_name = 'models/LSTM_model.h5' model_lstm = tf.keras.models.load_model(LSTM_model_name)", "lags in certain problems are bridged using LSTMs where they", "*AvgHumidity* and *AvgPressure*. 
Model predicts 7 days dam occupancy rate", "[] layout = go.Layout( title= \"<b>LSTM Dam Occupancy Forecasting Plot</b>\",paper_bgcolor", "= get_open_weather_map_data() prediction_lstm = model_lstm.predict(features) * 100 prediction_lstm = prediction_lstm.ravel()", "import streamlit as st import tensorflow as tf import numpy", "dict(title=\"Dam Occupancy Rate (%)\",linecolor=\"#021C1E\")) line_chart= go.Scatter(x=date_list, y=prediction_lstm, marker_color='rgb(0, 200, 200)'", "learns.</p>\"\"\", unsafe_allow_html=True) st.subheader('LSTM model input and output') st.markdown(\"Model input is", "st.markdown(\"Model input is 7 days daily weather data from [OpenWeatherAPI](https://openweathermap.org/api).", "Plot</b>\",paper_bgcolor = 'rgb(248, 248, 255)',plot_bgcolor = 'rgb(248, 248, 255)',barmode =", "to gap length gives an advantage to LSTM over alternative", "are bridged using LSTMs where they also handle noise, distributed", "have a memory that can store previous timestep information and", "previous timestep information and this is how it learns.</p>\"\"\", unsafe_allow_html=True)", "that thevanishing gradient problem is almost completely removed, while the", "(%)\",linecolor=\"#021C1E\")) line_chart= go.Scatter(x=date_list, y=prediction_lstm, marker_color='rgb(0, 200, 200)' ) data.append(line_chart) fig=", "tools import plotly.offline as py import plotly.express as px def", "well-suited to classify, process and predict time series given time", "alternative RNNs, hidden Markov models and other sequence learningmethods. 
In", "style='text-align: justify;'>LSTM is well-suited to classify, process and predict time", "over alternative RNNs, hidden Markov models and other sequence learningmethods.", "is how it learns.</p>\"\"\", unsafe_allow_html=True) st.subheader('LSTM model input and output')", "are an extension of recurrent neural networks (RNNs) mainly introduced", "* 100 prediction_lstm = prediction_lstm.ravel() date_list = get_date_list_for_gmt() data =", "= go.Layout( title= \"<b>LSTM Dam Occupancy Forecasting Plot</b>\",paper_bgcolor = 'rgb(248,", "do?') st.markdown(\"\"\"<p style='text-align: justify;'>LSTM networks are an extension of recurrent", "model is left unaltered. Long-time lags in certain problems are", "given time lags of unknown duration. Relative insensitivity to gap", "daily weather data from [OpenWeatherAPI](https://openweathermap.org/api). Model input features are *Rain*,", "these features.\", unsafe_allow_html=True) LSTM_model_name = 'models/LSTM_model.h5' model_lstm = tf.keras.models.load_model(LSTM_model_name) features", "sequence learningmethods. In addition, LSTM works great because LSTM cells", "and continuous values.</p>\"\"\", unsafe_allow_html=True) st.subheader('Why we chose LSTM?') st.markdown(\"\"\"<p style='text-align:", "layout = go.Layout( title= \"<b>LSTM Dam Occupancy Forecasting Plot</b>\",paper_bgcolor =", "of recurrent neural networks (RNNs) mainly introduced to handle situations", "248, 255)',plot_bgcolor = 'rgb(248, 248, 255)',barmode = \"stack\", xaxis =", "where RNNs fail. It has been so designed that thevanishing", "training model is left unaltered. 
Long-time lags in certain problems", "continuous values.</p>\"\"\", unsafe_allow_html=True) st.subheader('Why we chose LSTM?') st.markdown(\"\"\"<p style='text-align: justify;'>LSTM", "that can store previous timestep information and this is how", "chose LSTM?') st.markdown(\"\"\"<p style='text-align: justify;'>LSTM is well-suited to classify, process", "date_list = get_date_list_for_gmt() data = [] layout = go.Layout( title=", "yaxis= dict(title=\"Dam Occupancy Rate (%)\",linecolor=\"#021C1E\")) line_chart= go.Scatter(x=date_list, y=prediction_lstm, marker_color='rgb(0, 200,", "Rate (%)\",linecolor=\"#021C1E\")) line_chart= go.Scatter(x=date_list, y=prediction_lstm, marker_color='rgb(0, 200, 200)' ) data.append(line_chart)", "255)',barmode = \"stack\", xaxis = dict(title=\"Time\", linecolor=\"#BCCCDC\",showspikes=True,spikethickness=2,spikedash=\"dot\",spikecolor= \"#ffffff\",spikemode=\"across\",), yaxis= dict(title=\"Dam", "prediction_lstm = prediction_lstm.ravel() date_list = get_date_list_for_gmt() data = [] layout", "gradient problem is almost completely removed, while the training model", "st.subheader('LSTM model input and output') st.markdown(\"Model input is 7 days", "of İstanbul using these features.\", unsafe_allow_html=True) LSTM_model_name = 'models/LSTM_model.h5' model_lstm", "= tf.keras.models.load_model(LSTM_model_name) features = get_open_weather_map_data() prediction_lstm = model_lstm.predict(features) * 100", "*MaxTemp*, *MinTemp*, *AvgWind*, *AvgHumidity* and *AvgPressure*. Model predicts 7 days", "as tf import numpy from utils.get_owm_data import get_open_weather_map_data from utils.get_date", "Relative insensitivity to gap length gives an advantage to LSTM", "unknown duration. Relative insensitivity to gap length gives an advantage", "RNNs, hidden Markov models and other sequence learningmethods. 
In addition,", "= model_lstm.predict(features) * 100 prediction_lstm = prediction_lstm.ravel() date_list = get_date_list_for_gmt()", "to classify, process and predict time series given time lags", "def app(): st.title(\"LSTM Model\") st.subheader('What does LSTM model do?') st.markdown(\"\"\"<p", "Long-time lags in certain problems are bridged using LSTMs where", "\"#ffffff\",spikemode=\"across\",), yaxis= dict(title=\"Dam Occupancy Rate (%)\",linecolor=\"#021C1E\")) line_chart= go.Scatter(x=date_list, y=prediction_lstm, marker_color='rgb(0,", "go from plotly import tools import plotly.offline as py import", "to LSTM over alternative RNNs, hidden Markov models and other", "handle situations where RNNs fail. It has been so designed", "prediction_lstm.ravel() date_list = get_date_list_for_gmt() data = [] layout = go.Layout(", "title= \"<b>LSTM Dam Occupancy Forecasting Plot</b>\",paper_bgcolor = 'rgb(248, 248, 255)',plot_bgcolor", "thevanishing gradient problem is almost completely removed, while the training", "LSTM over alternative RNNs, hidden Markov models and other sequence", "where they also handle noise, distributed representations, and continuous values.</p>\"\"\",", "st.subheader('Why we chose LSTM?') st.markdown(\"\"\"<p style='text-align: justify;'>LSTM is well-suited to", "recurrent neural networks (RNNs) mainly introduced to handle situations where", "In addition, LSTM works great because LSTM cells have a", "problem is almost completely removed, while the training model is", "learningmethods. 
In addition, LSTM works great because LSTM cells have", "İstanbul using these features.\", unsafe_allow_html=True) LSTM_model_name = 'models/LSTM_model.h5' model_lstm =", "xaxis = dict(title=\"Time\", linecolor=\"#BCCCDC\",showspikes=True,spikethickness=2,spikedash=\"dot\",spikecolor= \"#ffffff\",spikemode=\"across\",), yaxis= dict(title=\"Dam Occupancy Rate (%)\",linecolor=\"#021C1E\"))", "representations, and continuous values.</p>\"\"\", unsafe_allow_html=True) st.subheader('Why we chose LSTM?') st.markdown(\"\"\"<p", "py import plotly.express as px def app(): st.title(\"LSTM Model\") st.subheader('What", "does LSTM model do?') st.markdown(\"\"\"<p style='text-align: justify;'>LSTM networks are an", "designed that thevanishing gradient problem is almost completely removed, while", "numpy from utils.get_owm_data import get_open_weather_map_data from utils.get_date import get_date_list_for_gmt import", "plotly import tools import plotly.offline as py import plotly.express as", "been so designed that thevanishing gradient problem is almost completely", "input and output') st.markdown(\"Model input is 7 days daily weather", "LSTMs where they also handle noise, distributed representations, and continuous", "also handle noise, distributed representations, and continuous values.</p>\"\"\", unsafe_allow_html=True) st.subheader('Why", "utils.get_date import get_date_list_for_gmt import plotly.graph_objects as go from plotly import", "in certain problems are bridged using LSTMs where they also", "situations where RNNs fail. 
It has been so designed that", "tf.keras.models.load_model(LSTM_model_name) features = get_open_weather_map_data() prediction_lstm = model_lstm.predict(features) * 100 prediction_lstm", "using LSTMs where they also handle noise, distributed representations, and", "streamlit as st import tensorflow as tf import numpy from", "unsafe_allow_html=True) LSTM_model_name = 'models/LSTM_model.h5' model_lstm = tf.keras.models.load_model(LSTM_model_name) features = get_open_weather_map_data()", "Markov models and other sequence learningmethods. In addition, LSTM works", "cells have a memory that can store previous timestep information", "are *Rain*, *MaxTemp*, *MinTemp*, *AvgWind*, *AvgHumidity* and *AvgPressure*. Model predicts", "great because LSTM cells have a memory that can store", "input is 7 days daily weather data from [OpenWeatherAPI](https://openweathermap.org/api). Model", "features.\", unsafe_allow_html=True) LSTM_model_name = 'models/LSTM_model.h5' model_lstm = tf.keras.models.load_model(LSTM_model_name) features =", "Forecasting Plot</b>\",paper_bgcolor = 'rgb(248, 248, 255)',plot_bgcolor = 'rgb(248, 248, 255)',barmode", "values.</p>\"\"\", unsafe_allow_html=True) st.subheader('Why we chose LSTM?') st.markdown(\"\"\"<p style='text-align: justify;'>LSTM is", "classify, process and predict time series given time lags of", "features = get_open_weather_map_data() prediction_lstm = model_lstm.predict(features) * 100 prediction_lstm =", "an advantage to LSTM over alternative RNNs, hidden Markov models", "go.Scatter(x=date_list, y=prediction_lstm, marker_color='rgb(0, 200, 200)' ) data.append(line_chart) fig= go.Figure(data=data, layout=layout)", "linecolor=\"#BCCCDC\",showspikes=True,spikethickness=2,spikedash=\"dot\",spikecolor= \"#ffffff\",spikemode=\"across\",), yaxis= dict(title=\"Dam Occupancy Rate (%)\",linecolor=\"#021C1E\")) line_chart= go.Scatter(x=date_list, y=prediction_lstm,", "import get_open_weather_map_data from utils.get_date import 
get_date_list_for_gmt import plotly.graph_objects as go", "prediction_lstm = model_lstm.predict(features) * 100 prediction_lstm = prediction_lstm.ravel() date_list =", "LSTM cells have a memory that can store previous timestep", "plotly.offline as py import plotly.express as px def app(): st.title(\"LSTM", "import plotly.graph_objects as go from plotly import tools import plotly.offline", "st.title(\"LSTM Model\") st.subheader('What does LSTM model do?') st.markdown(\"\"\"<p style='text-align: justify;'>LSTM", "predict time series given time lags of unknown duration. Relative", "LSTM_model_name = 'models/LSTM_model.h5' model_lstm = tf.keras.models.load_model(LSTM_model_name) features = get_open_weather_map_data() prediction_lstm", "a memory that can store previous timestep information and this", "series given time lags of unknown duration. Relative insensitivity to", "model input and output') st.markdown(\"Model input is 7 days daily", "they also handle noise, distributed representations, and continuous values.</p>\"\"\", unsafe_allow_html=True)", "extension of recurrent neural networks (RNNs) mainly introduced to handle", "*Rain*, *MaxTemp*, *MinTemp*, *AvgWind*, *AvgHumidity* and *AvgPressure*. Model predicts 7", "It has been so designed that thevanishing gradient problem is", "length gives an advantage to LSTM over alternative RNNs, hidden", "time lags of unknown duration. Relative insensitivity to gap length", "certain problems are bridged using LSTMs where they also handle", "information and this is how it learns.</p>\"\"\", unsafe_allow_html=True) st.subheader('LSTM model", "to handle situations where RNNs fail. 
It has been so", "LSTM?') st.markdown(\"\"\"<p style='text-align: justify;'>LSTM is well-suited to classify, process and", "= [] layout = go.Layout( title= \"<b>LSTM Dam Occupancy Forecasting", "= \"stack\", xaxis = dict(title=\"Time\", linecolor=\"#BCCCDC\",showspikes=True,spikethickness=2,spikedash=\"dot\",spikecolor= \"#ffffff\",spikemode=\"across\",), yaxis= dict(title=\"Dam Occupancy", "utils.get_owm_data import get_open_weather_map_data from utils.get_date import get_date_list_for_gmt import plotly.graph_objects as", "model_lstm.predict(features) * 100 prediction_lstm = prediction_lstm.ravel() date_list = get_date_list_for_gmt() data", "st.markdown(\"\"\"<p style='text-align: justify;'>LSTM networks are an extension of recurrent neural", "import get_date_list_for_gmt import plotly.graph_objects as go from plotly import tools", "style='text-align: justify;'>LSTM networks are an extension of recurrent neural networks", "introduced to handle situations where RNNs fail. It has been", "mainly introduced to handle situations where RNNs fail. It has", "problems are bridged using LSTMs where they also handle noise,", "addition, LSTM works great because LSTM cells have a memory", "because LSTM cells have a memory that can store previous", "= 'rgb(248, 248, 255)',plot_bgcolor = 'rgb(248, 248, 255)',barmode = \"stack\",", "plotly.graph_objects as go from plotly import tools import plotly.offline as", "is left unaltered. 
Long-time lags in certain problems are bridged", "= get_date_list_for_gmt() data = [] layout = go.Layout( title= \"<b>LSTM", "LSTM model do?') st.markdown(\"\"\"<p style='text-align: justify;'>LSTM networks are an extension", "rate of İstanbul using these features.\", unsafe_allow_html=True) LSTM_model_name = 'models/LSTM_model.h5'", "\"<b>LSTM Dam Occupancy Forecasting Plot</b>\",paper_bgcolor = 'rgb(248, 248, 255)',plot_bgcolor =", "from plotly import tools import plotly.offline as py import plotly.express", "from utils.get_owm_data import get_open_weather_map_data from utils.get_date import get_date_list_for_gmt import plotly.graph_objects", "as st import tensorflow as tf import numpy from utils.get_owm_data", "advantage to LSTM over alternative RNNs, hidden Markov models and", "as go from plotly import tools import plotly.offline as py", "store previous timestep information and this is how it learns.</p>\"\"\",", "plotly.express as px def app(): st.title(\"LSTM Model\") st.subheader('What does LSTM", "hidden Markov models and other sequence learningmethods. In addition, LSTM", "the training model is left unaltered. Long-time lags in certain", "'models/LSTM_model.h5' model_lstm = tf.keras.models.load_model(LSTM_model_name) features = get_open_weather_map_data() prediction_lstm = model_lstm.predict(features)", "of unknown duration. Relative insensitivity to gap length gives an", "[OpenWeatherAPI](https://openweathermap.org/api). 
Model input features are *Rain*, *MaxTemp*, *MinTemp*, *AvgWind*, *AvgHumidity*", "app(): st.title(\"LSTM Model\") st.subheader('What does LSTM model do?') st.markdown(\"\"\"<p style='text-align:", "handle noise, distributed representations, and continuous values.</p>\"\"\", unsafe_allow_html=True) st.subheader('Why we", "st.subheader('What does LSTM model do?') st.markdown(\"\"\"<p style='text-align: justify;'>LSTM networks are", "how it learns.</p>\"\"\", unsafe_allow_html=True) st.subheader('LSTM model input and output') st.markdown(\"Model", "output') st.markdown(\"Model input is 7 days daily weather data from", "248, 255)',barmode = \"stack\", xaxis = dict(title=\"Time\", linecolor=\"#BCCCDC\",showspikes=True,spikethickness=2,spikedash=\"dot\",spikecolor= \"#ffffff\",spikemode=\"across\",), yaxis=", "almost completely removed, while the training model is left unaltered.", "networks (RNNs) mainly introduced to handle situations where RNNs fail.", "go.Layout( title= \"<b>LSTM Dam Occupancy Forecasting Plot</b>\",paper_bgcolor = 'rgb(248, 248,", "process and predict time series given time lags of unknown", "Model input features are *Rain*, *MaxTemp*, *MinTemp*, *AvgWind*, *AvgHumidity* and", "other sequence learningmethods. In addition, LSTM works great because LSTM", "memory that can store previous timestep information and this is", "get_date_list_for_gmt import plotly.graph_objects as go from plotly import tools import", "features are *Rain*, *MaxTemp*, *MinTemp*, *AvgWind*, *AvgHumidity* and *AvgPressure*. Model", "7 days dam occupancy rate of İstanbul using these features.\",", "and output') st.markdown(\"Model input is 7 days daily weather data", "*AvgPressure*. Model predicts 7 days dam occupancy rate of İstanbul", "an extension of recurrent neural networks (RNNs) mainly introduced to", "completely removed, while the training model is left unaltered. 
Long-time", "neural networks (RNNs) mainly introduced to handle situations where RNNs", "so designed that thevanishing gradient problem is almost completely removed,", "7 days daily weather data from [OpenWeatherAPI](https://openweathermap.org/api). Model input features", "days daily weather data from [OpenWeatherAPI](https://openweathermap.org/api). Model input features are", "works great because LSTM cells have a memory that can", "and this is how it learns.</p>\"\"\", unsafe_allow_html=True) st.subheader('LSTM model input", "predicts 7 days dam occupancy rate of İstanbul using these", "days dam occupancy rate of İstanbul using these features.\", unsafe_allow_html=True)", "get_date_list_for_gmt() data = [] layout = go.Layout( title= \"<b>LSTM Dam", "distributed representations, and continuous values.</p>\"\"\", unsafe_allow_html=True) st.subheader('Why we chose LSTM?')" ]
[ "But a very simple way to get started is to", "{ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': path('database/fst_demo.db') } } LOG_LEVEL", "Setup mail server for sending email notifications. # You can", "sure to use a # trailing slash. # Examples: \"http://media.lawrence.com/media/\",", "lambda *args: os.path.join(ROOT, *args) \"\"\" Template for local settings of", "ALLOWED_HOSTS # and add IP address and/or domain of your", "'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': path('database/fst_demo.db') } } LOG_LEVEL =", "ADMINS = () MANAGERS = ADMINS # URL that handles", "and port of specific FST instance (do not add ending", "* DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': path('database/fst_demo.db')", "gmail account. EMAIL_USE_TLS = True EMAIL_HOST = 'smtp.gmail.com' EMAIL_PORT =", "Admins specified here receive email notifications on critical errors. ADMINS", "this to False before deploying DEBUG = True # NOTE!", "not add ending '/'). FST_SITE_URL = \"http://127.0.0.1:8000\" # TODO -", "ending '/'). FST_SITE_URL = \"http://127.0.0.1:8000\" # TODO - Check if", "way to get started is to use a gmail account.", "= ['localhost', '127.0.0.1', 'fst.magokoro.nu'] # Look for instance-specific settings try:", "Always set this to False before deploying DEBUG = True", "hosting FST service (do not add ending '/'). FST_SITE_URL =", "mail server for sending email notifications. # You can use", "address and/or domain of your site ALLOWED_HOSTS = ['localhost', '127.0.0.1',", "\"\"\" # NOTE! 
Always set this to False before deploying", "LOG_LEVEL = \"DEBUG\" # Enable this to override global DB", "and/or domain of your site ALLOWED_HOSTS = ['localhost', '127.0.0.1', 'fst.magokoro.nu']", "# DB_DEBUG_LEVEL = \"DEBUG\" # Setup mail server for sending", "all generic values with values suitable to your particular installation.", "EMAIL_HOST_USER = 'your email' # EMAIL_HOST_PASSWORD = '<PASSWORD>' # Admins", "\"http://example.com/media/\" MEDIA_URL = os.path.join(\"/dokument/\") # Site and port for hosting", "handles the media served from MEDIA_ROOT. Make sure to use", "use a # trailing slash. # Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL", "a public, uncomment ALLOWED_HOSTS # and add IP address and/or", "deploying on a public, uncomment ALLOWED_HOSTS # and add IP", "'127.0.0.1', 'fst.magokoro.nu'] # Look for instance-specific settings try: from .instance_settings", "email notifications. # You can use any mail server you", "# Site and port of specific FST instance (do not", "on critical errors. ADMINS = () MANAGERS = ADMINS #", "port for hosting FST service (do not add ending '/').", "path = lambda *args: os.path.join(ROOT, *args) \"\"\" Template for local", "account. EMAIL_USE_TLS = True EMAIL_HOST = 'smtp.gmail.com' EMAIL_PORT = 587", "deploying DEBUG = True # NOTE! Before deploying on a", "removed # Site and port of specific FST instance (do", "of your site ALLOWED_HOSTS = ['localhost', '127.0.0.1', 'fst.magokoro.nu'] # Look", "False before deploying DEBUG = True # NOTE! Before deploying", "URL that handles the media served from MEDIA_ROOT. Make sure", "= \"http://127.0.0.1:8000\" # TODO - Check if FST_INSTANCE_PREFIX can be", "= True EMAIL_HOST = 'smtp.gmail.com' EMAIL_PORT = 587 # EMAIL_HOST_USER", "server for sending email notifications. 
# You can use any", "'django.db.backends.sqlite3', 'NAME': path('database/fst_demo.db') } } LOG_LEVEL = \"DEBUG\" # Enable", "uncomment ALLOWED_HOSTS # and add IP address and/or domain of", "# Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL = os.path.join(\"/dokument/\") # Site and", "want. # But a very simple way to get started", "specific FST instance (do not add ending '/'). FST_INSTANCE_URL =", "= os.path.abspath(os.path.dirname(__file__)) path = lambda *args: os.path.join(ROOT, *args) \"\"\" Template", "MEDIA_ROOT. Make sure to use a # trailing slash. #", "= True # NOTE! Before deploying on a public, uncomment", "replace all generic values with values suitable to your particular", "= 'your email' # EMAIL_HOST_PASSWORD = '<PASSWORD>' # Admins specified", "= { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': path('database/fst_demo.db') } }", "TODO - Check if FST_INSTANCE_PREFIX can be removed # Site", "# Setup mail server for sending email notifications. # You", "a gmail account. EMAIL_USE_TLS = True EMAIL_HOST = 'smtp.gmail.com' EMAIL_PORT", "-*- import os ROOT = os.path.abspath(os.path.dirname(__file__)) path = lambda *args:", "local settings of the FST webservice (fst_web) Please edit this", "values suitable to your particular installation. \"\"\" # NOTE! Always", "Site and port of specific FST instance (do not add", "this to override global DB Debug setting # DB_DEBUG_LEVEL =", "*args: os.path.join(ROOT, *args) \"\"\" Template for local settings of the", "before deploying DEBUG = True # NOTE! Before deploying on", "sending email notifications. # You can use any mail server", "# But a very simple way to get started is", "the FST webservice (fst_web) Please edit this file and replace", "= os.path.join(\"/dokument/\") # Site and port for hosting FST service", "for hosting FST service (do not add ending '/'). 
FST_SITE_URL", "import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME':", "\"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL = os.path.join(\"/dokument/\") # Site and port for", "{ 'ENGINE': 'django.db.backends.sqlite3', 'NAME': path('database/fst_demo.db') } } LOG_LEVEL = \"DEBUG\"", "use any mail server you want. # But a very", "of specific FST instance (do not add ending '/'). FST_INSTANCE_URL", "# NOTE! Before deploying on a public, uncomment ALLOWED_HOSTS #", "started is to use a gmail account. EMAIL_USE_TLS = True", "= ADMINS # URL that handles the media served from", "a # trailing slash. # Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL =", "= () MANAGERS = ADMINS # URL that handles the", "you want. # But a very simple way to get", "to use a gmail account. EMAIL_USE_TLS = True EMAIL_HOST =", "email' # EMAIL_HOST_PASSWORD = '<PASSWORD>' # Admins specified here receive", "if FST_INSTANCE_PREFIX can be removed # Site and port of", "setting # DB_DEBUG_LEVEL = \"DEBUG\" # Setup mail server for", "the media served from MEDIA_ROOT. Make sure to use a", "to False before deploying DEBUG = True # NOTE! Before", "'your email' # EMAIL_HOST_PASSWORD = '<PASSWORD>' # Admins specified here", "this file and replace all generic values with values suitable", "email notifications on critical errors. ADMINS = () MANAGERS =", "site ALLOWED_HOSTS = ['localhost', '127.0.0.1', 'fst.magokoro.nu'] # Look for instance-specific", "served from MEDIA_ROOT. Make sure to use a # trailing", "to your particular installation. \"\"\" # NOTE! 
Always set this", "import os ROOT = os.path.abspath(os.path.dirname(__file__)) path = lambda *args: os.path.join(ROOT,", "= '<PASSWORD>' # Admins specified here receive email notifications on", "coding: utf-8 -*- import os ROOT = os.path.abspath(os.path.dirname(__file__)) path =", "to override global DB Debug setting # DB_DEBUG_LEVEL = \"DEBUG\"", "# EMAIL_HOST_PASSWORD = '<PASSWORD>' # Admins specified here receive email", "add ending '/'). FST_SITE_URL = \"http://127.0.0.1:8000\" # TODO - Check", "Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL = os.path.join(\"/dokument/\") # Site and port", "= lambda *args: os.path.join(ROOT, *args) \"\"\" Template for local settings", "# Enable this to override global DB Debug setting #", "# Admins specified here receive email notifications on critical errors.", "be removed # Site and port of specific FST instance", "public, uncomment ALLOWED_HOSTS # and add IP address and/or domain", "['localhost', '127.0.0.1', 'fst.magokoro.nu'] # Look for instance-specific settings try: from", "override global DB Debug setting # DB_DEBUG_LEVEL = \"DEBUG\" #", "mail server you want. # But a very simple way", "EMAIL_HOST_PASSWORD = '<PASSWORD>' # Admins specified here receive email notifications", "True EMAIL_HOST = 'smtp.gmail.com' EMAIL_PORT = 587 # EMAIL_HOST_USER =", "'NAME': path('database/fst_demo.db') } } LOG_LEVEL = \"DEBUG\" # Enable this", "'fst.magokoro.nu'] # Look for instance-specific settings try: from .instance_settings import", "EMAIL_HOST = 'smtp.gmail.com' EMAIL_PORT = 587 # EMAIL_HOST_USER = 'your", "Debug setting # DB_DEBUG_LEVEL = \"DEBUG\" # Setup mail server", "*args) \"\"\" Template for local settings of the FST webservice", "receive email notifications on critical errors. ADMINS = () MANAGERS", "your site ALLOWED_HOSTS = ['localhost', '127.0.0.1', 'fst.magokoro.nu'] # Look for", "port of specific FST instance (do not add ending '/').", "instance (do not add ending '/'). 
FST_INSTANCE_URL = os.path.join( \"http://127.0.0.1:8000\",", "-*- coding: utf-8 -*- import os ROOT = os.path.abspath(os.path.dirname(__file__)) path", "# and add IP address and/or domain of your site", "FST service (do not add ending '/'). FST_SITE_URL = \"http://127.0.0.1:8000\"", "from .instance_settings import * except ImportError: from .default_instance_settings import *", "for local settings of the FST webservice (fst_web) Please edit", "specified here receive email notifications on critical errors. ADMINS =", "particular installation. \"\"\" # NOTE! Always set this to False", "FST_INSTANCE_PREFIX can be removed # Site and port of specific", "webservice (fst_web) Please edit this file and replace all generic", "import * except ImportError: from .default_instance_settings import * DATABASES =", "= 587 # EMAIL_HOST_USER = 'your email' # EMAIL_HOST_PASSWORD =", "service (do not add ending '/'). FST_SITE_URL = \"http://127.0.0.1:8000\" #", "here receive email notifications on critical errors. ADMINS = ()", "= 'smtp.gmail.com' EMAIL_PORT = 587 # EMAIL_HOST_USER = 'your email'", "and replace all generic values with values suitable to your", "Enable this to override global DB Debug setting # DB_DEBUG_LEVEL", "trailing slash. # Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL = os.path.join(\"/dokument/\") #", "Site and port for hosting FST service (do not add", "ROOT = os.path.abspath(os.path.dirname(__file__)) path = lambda *args: os.path.join(ROOT, *args) \"\"\"", "values with values suitable to your particular installation. \"\"\" #", "# -*- coding: utf-8 -*- import os ROOT = os.path.abspath(os.path.dirname(__file__))", "\"DEBUG\" # Setup mail server for sending email notifications. 
#", "587 # EMAIL_HOST_USER = 'your email' # EMAIL_HOST_PASSWORD = '<PASSWORD>'", "- Check if FST_INSTANCE_PREFIX can be removed # Site and", "except ImportError: from .default_instance_settings import * DATABASES = { 'default':", "from .default_instance_settings import * DATABASES = { 'default': { 'ENGINE':", "a very simple way to get started is to use", "FST webservice (fst_web) Please edit this file and replace all", "# Look for instance-specific settings try: from .instance_settings import *", "for instance-specific settings try: from .instance_settings import * except ImportError:", "can use any mail server you want. # But a", "use a gmail account. EMAIL_USE_TLS = True EMAIL_HOST = 'smtp.gmail.com'", "os.path.join(\"/dokument/\") # Site and port for hosting FST service (do", "# You can use any mail server you want. #", "Please edit this file and replace all generic values with", "FST_SITE_URL = \"http://127.0.0.1:8000\" # TODO - Check if FST_INSTANCE_PREFIX can", "utf-8 -*- import os ROOT = os.path.abspath(os.path.dirname(__file__)) path = lambda", "set this to False before deploying DEBUG = True #", "that handles the media served from MEDIA_ROOT. Make sure to", "= \"DEBUG\" # Enable this to override global DB Debug", "get started is to use a gmail account. EMAIL_USE_TLS =", "suitable to your particular installation. \"\"\" # NOTE! Always set", "of the FST webservice (fst_web) Please edit this file and", "DB_DEBUG_LEVEL = \"DEBUG\" # Setup mail server for sending email", "notifications on critical errors. 
ADMINS = () MANAGERS = ADMINS", "} LOG_LEVEL = \"DEBUG\" # Enable this to override global", "global DB Debug setting # DB_DEBUG_LEVEL = \"DEBUG\" # Setup", "try: from .instance_settings import * except ImportError: from .default_instance_settings import", "DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': path('database/fst_demo.db') }", "domain of your site ALLOWED_HOSTS = ['localhost', '127.0.0.1', 'fst.magokoro.nu'] #", "DB Debug setting # DB_DEBUG_LEVEL = \"DEBUG\" # Setup mail", "os.path.join(ROOT, *args) \"\"\" Template for local settings of the FST", "for sending email notifications. # You can use any mail", "ALLOWED_HOSTS = ['localhost', '127.0.0.1', 'fst.magokoro.nu'] # Look for instance-specific settings", "# EMAIL_HOST_USER = 'your email' # EMAIL_HOST_PASSWORD = '<PASSWORD>' #", "\"\"\" Template for local settings of the FST webservice (fst_web)", "True # NOTE! Before deploying on a public, uncomment ALLOWED_HOSTS", "on a public, uncomment ALLOWED_HOSTS # and add IP address", "settings of the FST webservice (fst_web) Please edit this file", "simple way to get started is to use a gmail", "critical errors. ADMINS = () MANAGERS = ADMINS # URL", "Before deploying on a public, uncomment ALLOWED_HOSTS # and add", "instance-specific settings try: from .instance_settings import * except ImportError: from", "is to use a gmail account. EMAIL_USE_TLS = True EMAIL_HOST", "any mail server you want. # But a very simple", "DEBUG = True # NOTE! Before deploying on a public,", "os ROOT = os.path.abspath(os.path.dirname(__file__)) path = lambda *args: os.path.join(ROOT, *args)", "file and replace all generic values with values suitable to", "very simple way to get started is to use a", "(do not add ending '/'). 
FST_INSTANCE_URL = os.path.join( \"http://127.0.0.1:8000\", FST_INSTANCE_PREFIX)", "os.path.abspath(os.path.dirname(__file__)) path = lambda *args: os.path.join(ROOT, *args) \"\"\" Template for", "IP address and/or domain of your site ALLOWED_HOSTS = ['localhost',", "add IP address and/or domain of your site ALLOWED_HOSTS =", "ADMINS # URL that handles the media served from MEDIA_ROOT.", "your particular installation. \"\"\" # NOTE! Always set this to", "NOTE! Before deploying on a public, uncomment ALLOWED_HOSTS # and", "# trailing slash. # Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL = os.path.join(\"/dokument/\")", "errors. ADMINS = () MANAGERS = ADMINS # URL that", "Look for instance-specific settings try: from .instance_settings import * except", "MANAGERS = ADMINS # URL that handles the media served", "# Site and port for hosting FST service (do not", "'smtp.gmail.com' EMAIL_PORT = 587 # EMAIL_HOST_USER = 'your email' #", "settings try: from .instance_settings import * except ImportError: from .default_instance_settings", "installation. \"\"\" # NOTE! Always set this to False before", "# URL that handles the media served from MEDIA_ROOT. Make", "EMAIL_USE_TLS = True EMAIL_HOST = 'smtp.gmail.com' EMAIL_PORT = 587 #", "Make sure to use a # trailing slash. # Examples:", "'ENGINE': 'django.db.backends.sqlite3', 'NAME': path('database/fst_demo.db') } } LOG_LEVEL = \"DEBUG\" #", "with values suitable to your particular installation. \"\"\" # NOTE!", ".default_instance_settings import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3',", "\"DEBUG\" # Enable this to override global DB Debug setting", "to get started is to use a gmail account. EMAIL_USE_TLS", "# NOTE! Always set this to False before deploying DEBUG", "and port for hosting FST service (do not add ending", "= \"DEBUG\" # Setup mail server for sending email notifications.", "to use a # trailing slash. 
# Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\"", "(fst_web) Please edit this file and replace all generic values", "} } LOG_LEVEL = \"DEBUG\" # Enable this to override", "(do not add ending '/'). FST_SITE_URL = \"http://127.0.0.1:8000\" # TODO", "FST instance (do not add ending '/'). FST_INSTANCE_URL = os.path.join(", "server you want. # But a very simple way to", "edit this file and replace all generic values with values", "'<PASSWORD>' # Admins specified here receive email notifications on critical", "notifications. # You can use any mail server you want.", "Template for local settings of the FST webservice (fst_web) Please", "slash. # Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL = os.path.join(\"/dokument/\") # Site", "\"http://127.0.0.1:8000\" # TODO - Check if FST_INSTANCE_PREFIX can be removed", "path('database/fst_demo.db') } } LOG_LEVEL = \"DEBUG\" # Enable this to", "and add IP address and/or domain of your site ALLOWED_HOSTS", "* except ImportError: from .default_instance_settings import * DATABASES = {", "from MEDIA_ROOT. Make sure to use a # trailing slash.", "() MANAGERS = ADMINS # URL that handles the media", "NOTE! Always set this to False before deploying DEBUG =", "generic values with values suitable to your particular installation. \"\"\"", "can be removed # Site and port of specific FST", "MEDIA_URL = os.path.join(\"/dokument/\") # Site and port for hosting FST", "You can use any mail server you want. # But", "media served from MEDIA_ROOT. Make sure to use a #", ".instance_settings import * except ImportError: from .default_instance_settings import * DATABASES", "# TODO - Check if FST_INSTANCE_PREFIX can be removed #", "Check if FST_INSTANCE_PREFIX can be removed # Site and port", "EMAIL_PORT = 587 # EMAIL_HOST_USER = 'your email' # EMAIL_HOST_PASSWORD", "ImportError: from .default_instance_settings import * DATABASES = { 'default': {", "'/'). 
FST_SITE_URL = \"http://127.0.0.1:8000\" # TODO - Check if FST_INSTANCE_PREFIX" ]
[ "+ str(offset) r = requests.get(url, headers= {'User-Agent': 'Mozilla/5.0 (Windows NT", "{'class': 'jq_tooltip'})\") #print(ho.find('a', {'class': 'jq_tooltip'})) #name = ho.find('a', {'class': 'jq_tooltip'})['data-title']", "name = str(ho.find('span', {'class': 'sr-hotel__name'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else: name = '-1' if", "is not None: nota = str(ho.find('span', {'class': '_ky9opu0'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\")) else :", "'-1' if ho.find('div', {'class': 'bui-price-display__value prco-inline-block-maker-helper'}) is not None: price", "country, dest_id, DayIni,DayFim) all_offset = parsed_html.find_all('li', {'class': 'sr_pagination_item'})[-1].get_text().splitlines()[-1] threads =", "'_ky9opu0'}) is not None: nota = str(ho.find('span', {'class': '_ky9opu0'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\")) else", "[] for i in range(int(all_offset)): offset += 1 t =", "file ''' print('Procurando por',country) hotels_list = prep_data(rooms, country,dest_id, DayIni, DayFim,", "= '----' # else: # distance = '----' # if", "Identificado' locais = [d['Pais'] + ':' + d['dest_id'] for d", "for d in localidades if d['Pais'] != ''] print('----------') print('Utilize", "dest_id='-1', DayIni='01/01/2019', DayFim='02/01/2019', out_format=None): ''' Prepare data for saving :return:", "f: countries = f.read().splitlines() return countries def get_booking_page(session, offset, rooms,", "distance) #hotels.append(str(len(hotels) + 1) + ' : ' + name", "= '-1' if ho.find('div', {'class': 'bui-price-display__value prco-inline-block-maker-helper'}) is not None:", "BookingThread(session, offset, rooms, country,dest_id,DayIni, DayFim, process_hotels) threads.append(t) for t in", "{'class': 'bui-price-display__value prco-inline-block-maker-helper'}) is not None: price = ho.find('div', {'class':", "= BeautifulSoup(html, 'lxml') return parsed_html def 
process_hotels(session, offset, rooms, country,", "'km': # distance = result # else: # distance =", "accomodations in Macedonia and save them in file :return: hotels-in-macedonia.{txt/csv/xlsx}", "choices=['json', 'excel', 'csv'], nargs='?').completer = EnvironCompleter argcomplete.autocomplete(parser) args = parser.parse_args()", ":param data: hotels list :param out_format: json, csv or excel", "rooms, country, dest_id, DayIni, DayFim): print('get_booking_page(session, offset, rooms, country, dest_id,", "bs4 import BeautifulSoup from file_writer import FileWriter hotels = []", "prep_data(rooms=1, country='Macedonia', dest_id='-1', DayIni='01/01/2019', DayFim='02/01/2019', out_format=None): ''' Prepare data for", "in 'km': # distance = str(i) # else: # distance", "+ ':' + d['dest_id'] for d in localidades if d['Pais']", "in localidades if args.dest_id in d['dest_id']] if len(countryAux)>0: country =", "6.1; Win64; x64; rv:47.0)' ' Gecko/20100101 Firefox/48.0'}) html = r.content", "the booking request.', default=1, type=int, nargs='?') parser.add_argument(\"--country\", help='Add the country", "help='Add the format for the output file. 
Add excel, json", "print('----------') print('Utilize uma das seguintes localizações') for i in locais:", "(Windows NT 6.1; Win64; x64; rv:47.0)' ' Gecko/20100101 Firefox/48.0'}) html", "'sr_item'}) for ho in hotel: #print(\"ho.find('a', {'class': 'jq_tooltip'})\") #print(ho.find('a', {'class':", "country, dest_id,DayIni, DayFim) hotel = parsed_html.find_all('div', {'class': 'sr_item'}) for ho", "country): ''' Saves hotels list in file :param data: hotels", "if d['Pais'] != ''] print('----------') print('Utilize uma das seguintes localizações')", "/usr/bin/env python3.6 import argparse import argcomplete from argcomplete.completers import ChoicesCompleter", "# else: # distance = '----' # else: # distance", "import BeautifulSoup from file_writer import FileWriter hotels = [] def", "r = requests.get(url, headers= {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64;", "por',country) hotels_list = prep_data(rooms, country,dest_id, DayIni, DayFim, out_format) save_data(hotels_list ,", "number of rooms to the booking request.', default=1, type=int, nargs='?')", "country, dest_id, DayIni, DayFim): parsed_html = get_booking_page(session, offset, rooms, country,", "accommodations are saved.') print('You can find them in', file, 'file')", "excel, json or csv.', default='json', choices=['json', 'excel', 'csv'], nargs='?').completer =", "parsed_html = get_booking_page(session, offset, rooms, country, dest_id, DayIni,DayFim) all_offset =", "in ho.find_all('span', attrs={'data-bui-component' : 'Tooltip'})] # print('TAMANHO TOOLTIP', str(len(result))) #", "str(offset) r = requests.get(url, headers= {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1;", "= '---' hotels.append(DayIni+';'+DayFim+';'+name + ';' + price + ';' +", "print('Procurando por',country) hotels_list = prep_data(rooms, country,dest_id, DayIni, DayFim, out_format) save_data(hotels_list", "for the output file. 
Add excel, json or csv.', default='json',", "country to the booking request.', default='0', nargs='?') parser.add_argument(\"--DayIni\", help='Data inicial',", "to the booking request.', default='0', nargs='?') parser.add_argument(\"--DayIni\", help='Data inicial', default='01/01/2019',", "'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0)' ' Gecko/20100101 Firefox/48.0'})", "{'class': 'jq_tooltip'})) #name = ho.find('a', {'class': 'jq_tooltip'})['data-title'] print(\"ho.find('span', {'class': 'sr-hotel__name'})\")", "DayIni, DayFim): parsed_html = get_booking_page(session, offset, rooms, country, dest_id,DayIni, DayFim)", ":return: ''' writer = FileWriter(data, out_format, country) file = writer.output_file()", "hotels list in file :param data: hotels list :param out_format:", "#! /usr/bin/env python3.6 import argparse import argcomplete from argcomplete.completers import", "type=int, nargs='?') parser.add_argument(\"--country\", help='Add the country to the booking request.',", "distance on the map. 
Actual travel distance may vary.'}) is", "distance may vary.'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else : distance = '-1' # if", "country) file = writer.output_file() print('All accommodations are saved.') print('You can", "and parse html :param offset: :return: html page ''' url", "''' offset = 1 session = requests.Session() parsed_html = get_booking_page(session,", "in Macedonia and save them in file :return: hotels-in-macedonia.{txt/csv/xlsx} file", "find them in', file, 'file') if __name__ == \"__main__\": parser", "booking request.', default='0', nargs='?') parser.add_argument(\"--DayIni\", help='Data inicial', default='01/01/2019', nargs='?') parser.add_argument(\"--DayFim\",", "from argcomplete.completers import EnvironCompleter import requests from bthread import BookingThread", "set() ''' offset = 1 session = requests.Session() parsed_html =", "offset, rooms, country, dest_id, DayIni, DayFim): print('get_booking_page(session, offset, rooms, country,", "country,args.dest_id,args.DayIni,args.DayFim, args.out_format) else: country = 'Nao Identificado' locais = [d['Pais']", "str(ho.find('span', {'class': '_ky9opu0'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\")) else : nota = '-1' if ho.find('span',", "= [d['Pais'] + ':' + d['dest_id'] for d in localidades", "= prep_data(rooms, country,dest_id, DayIni, DayFim, out_format) save_data(hotels_list , out_format=out_format, country=country)", "print(args.rooms, country,args.dest_id,args.DayIni,args.DayFim, args.out_format) get_data(args.rooms, country,args.dest_id,args.DayIni,args.DayFim, args.out_format) else: country = 'Nao", "country, dest_id, DayIni, DayFim) diaInicial = str(int(DayIni[0:2])) mesInicial = str(int(DayIni[3:5]))", "for item in ho.find_all('span', attrs={'data-bui-component' : 'Tooltip'})] # print('TAMANHO TOOLTIP',", "offset, rooms, country, dest_id, DayIni,DayFim) all_offset = parsed_html.find_all('li', {'class': 
'sr_pagination_item'})[-1].get_text().splitlines()[-1]", "= hotels return hotels2 def get_data(rooms=1, country='Macedonia', dest_id='-1',DayIni='01/01/2019',DayFim='02/01/2019', out_format=None): '''", "attrs={'data-bui-component' : 'Tooltip'})] # print('TAMANHO TOOLTIP', str(len(result))) # for i", "i in 'km': # distance = str(i) # else: #", "if ho.find('span', {'title': 'This is the straight-line distance on the", "json or csv.', default='json', choices=['json', 'excel', 'csv'], nargs='?').completer = EnvironCompleter", "'aaaaa' + str(len(result)) # else: # distance = '---' hotels.append(DayIni+';'+DayFim+';'+name", "argcomplete.completers import EnvironCompleter import requests from bthread import BookingThread from", "Actual travel distance may vary.'}) is not None: distance =", "}] countryAux = [d['Pais'] for d in localidades if args.dest_id", "i in range(int(all_offset)): offset += 1 t = BookingThread(session, offset,", "args = parser.parse_args() localidades = [{ 'Pais': 'London', 'dest_id': '-2601889'", "print(\"ho.find('span', {'class': 'sr-hotel__name'})\") #print(ho.find('span', {'class': 'sr-hotel__name'})) if ho.find('span', {'class': 'sr-hotel__name'})", "rooms, country, dest_id, DayIni,DayFim) all_offset = parsed_html.find_all('li', {'class': 'sr_pagination_item'})[-1].get_text().splitlines()[-1] threads", "else : distance = '-1' # if ho.find('a', {'class': 'bui-link'})", "str(int(DayIni[6:10])) diaFinal = str(int(DayFim[0:2])) mesFinal = str(int(DayFim[3:5])) anoFinal = str(int(DayFim[6:10]))", "BeautifulSoup(html, 'lxml') return parsed_html def process_hotels(session, offset, rooms, country, dest_id,", "{'class': 'sr_pagination_item'})[-1].get_text().splitlines()[-1] threads = [] for i in range(int(all_offset)): offset", "Macedonia and save them in file :return: hotels-in-macedonia.{txt/csv/xlsx} file '''", "range(int(all_offset)): offset += 1 t = BookingThread(session, offset, rooms, country,dest_id,DayIni,", "output file. 
Add excel, json or csv.', default='json', choices=['json', 'excel',", "= writer.output_file() print('All accommodations are saved.') print('You can find them", "Prepare data for saving :return: hotels: set() ''' offset =", "out_format) save_data(hotels_list , out_format=out_format, country=country) def save_data(data, out_format, country): '''", "save_data(data, out_format, country): ''' Saves hotels list in file :param", "# print(i) # for i in result: # if i", "out_format, country) file = writer.output_file() print('All accommodations are saved.') print('You", "'&place_id=ChIJdd4hrwug2EcRmSrV3Vo6llI&room_types%5B%5D=Entire%20home%2Fapt'\\ '&section_offset=6&items_offset=18'.format(rooms=rooms, country=country.replace(' ', '+'),anoFinal=anoFinal,mesFinal=mesFinal,diaInicial=diaInicial,mesInicial=mesInicial,anoInicial=anoInicial,diaFinal=diaFinal,dest_id=dest_id) + str(offset) r = requests.get(url,", "= argparse.ArgumentParser() countries = get_countries() parser.add_argument(\"--rooms\", help='Add the number of", "t in threads: t.join() hotels2 = hotels return hotels2 def", "is not None: distance = str(ho.find('span', {'title': 'This is the", "'sr_pagination_item'})[-1].get_text().splitlines()[-1] threads = [] for i in range(int(all_offset)): offset +=", "out_format=None): ''' Get all accomodations in Macedonia and save them", ": nota = '-1' if ho.find('span', {'title': 'This is the", "d in localidades if d['Pais'] != ''] print('----------') print('Utilize uma", "+ ';' + nota + ';' + distance) #hotels.append(str(len(hotels) +", "price = '-1' if ho.find('span', {'class': '_ky9opu0'}) is not None:", "out_format=out_format, country=country) def save_data(data, out_format, country): ''' Saves hotels list", "'&click_referer=t%3ASEE_ALL%7Csid%3A874f16ee-6196-4289-9717-17dec73e1e5c%7Cst%3AMAGAZINE_HOMES&screen_size=large&hide_dates_and_guests_filters=false'\\ 
'&ne_lat=51.80546533345978&ne_lng=0.4969575708007312&sw_lat=51.17528882051496&sw_lng=-0.8200285131836154&zoom=10&search_by_map=false&checkin={anoInicial}-{mesInicial}-{diaInicial}'\\ '&checkout={anoFinal}-{mesFinal}-{diaFinal}&adults={rooms}&property_type_id%5B%5D=1&property_type_id%5B%5D=43&property_type_id%5B%5D=47'\\ '&place_id=ChIJdd4hrwug2EcRmSrV3Vo6llI&room_types%5B%5D=Entire%20home%2Fapt'\\ '&section_offset=6&items_offset=18'.format(rooms=rooms, country=country.replace(' ', '+'),anoFinal=anoFinal,mesFinal=mesFinal,diaInicial=diaInicial,mesInicial=mesInicial,anoInicial=anoInicial,diaFinal=diaFinal,dest_id=dest_id) + str(offset)", "print('You can find them in', file, 'file') if __name__ ==", "'+'),anoFinal=anoFinal,mesFinal=mesFinal,diaInicial=diaInicial,mesInicial=mesInicial,anoInicial=anoInicial,diaFinal=diaFinal,dest_id=dest_id) + str(offset) r = requests.get(url, headers= {'User-Agent': 'Mozilla/5.0 (Windows", "bthread import BookingThread from bs4 import BeautifulSoup from file_writer import", "' : ' + name + ' : ' +", "parser.add_argument(\"--dest_id\", help='Add the country to the booking request.', default='0', nargs='?')", "offset += 1 t = BookingThread(session, offset, rooms, country,dest_id,DayIni, DayFim,", "booking request.', default='Macedonia', nargs='?').completer = ChoicesCompleter(countries) parser.add_argument(\"--dest_id\", help='Add the country", ", out_format=out_format, country=country) def save_data(data, out_format, country): ''' Saves hotels", "t = BookingThread(session, offset, rooms, country,dest_id,DayIni, DayFim, process_hotels) threads.append(t) for", "process_hotels(session, offset, rooms, country, dest_id, DayIni, DayFim): parsed_html = get_booking_page(session,", "= str(ho.find('span', {'class': '_ky9opu0'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\")) else : nota = '-1' if", "d['dest_id']] if len(countryAux)>0: country = countryAux[0] print('Parametros') print(args.rooms, 
country,args.dest_id,args.DayIni,args.DayFim, args.out_format)", "booking request.', default=1, type=int, nargs='?') parser.add_argument(\"--country\", help='Add the country to", "dest_id, DayIni, DayFim): print('get_booking_page(session, offset, rooms, country, dest_id, DayIni, DayFim):')", "None: price = ho.find('div', {'class': 'bui-price-display__value prco-inline-block-maker-helper'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\") else: price =", "= requests.Session() parsed_html = get_booking_page(session, offset, rooms, country, dest_id, DayIni,DayFim)", "parsed_html def process_hotels(session, offset, rooms, country, dest_id, DayIni, DayFim): parsed_html", "item in ho.find_all('span', attrs={'data-bui-component' : 'Tooltip'})] # print('TAMANHO TOOLTIP', str(len(result)))", "in result: # print(i) # for i in result: #", "argparse.ArgumentParser() countries = get_countries() parser.add_argument(\"--rooms\", help='Add the number of rooms", "get_booking_page(session, offset, rooms, country, dest_id, DayIni, DayFim): print('get_booking_page(session, offset, rooms,", "airbnb page and parse html :param offset: :return: html page", "ChoicesCompleter(countries) parser.add_argument(\"--dest_id\", help='Add the country to the booking request.', default='0',", "may vary.'}) is not None: distance = str(ho.find('span', {'title': 'This", "on the map. 
Actual travel distance may vary.'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else :", "str(i) # else: # distance = '----' # else: #", "country=country.replace(' ', '+'),anoFinal=anoFinal,mesFinal=mesFinal,diaInicial=diaInicial,mesInicial=mesInicial,anoInicial=anoInicial,diaFinal=diaFinal,dest_id=dest_id) + str(offset) r = requests.get(url, headers= {'User-Agent':", "''' Get all accomodations in Macedonia and save them in", "import argparse import argcomplete from argcomplete.completers import ChoicesCompleter from argcomplete.completers", "f.read().splitlines() return countries def get_booking_page(session, offset, rooms, country, dest_id, DayIni,", "__name__ == \"__main__\": parser = argparse.ArgumentParser() countries = get_countries() parser.add_argument(\"--rooms\",", "or csv.', default='json', choices=['json', 'excel', 'csv'], nargs='?').completer = EnvironCompleter argcomplete.autocomplete(parser)", "str(len(result))) # for i in result: # print(i) # for", "# distance = '----' # else: # distance = '----'", "travel distance may vary.'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else : distance = '-1' #", "hotels_list = prep_data(rooms, country,dest_id, DayIni, DayFim, out_format) save_data(hotels_list , out_format=out_format,", "html = r.content print(url) parsed_html = BeautifulSoup(html, 'lxml') return parsed_html", "travel distance may vary.'}) is not None: distance = str(ho.find('span',", "1 t = BookingThread(session, offset, rooms, country,dest_id,DayIni, DayFim, process_hotels) threads.append(t)", "in threads: t.join() hotels2 = hotels return hotels2 def get_data(rooms=1,", "'Utrecht', 'dest_id': '-2154382' }, { 'Pais': 'Buzios', 'dest_id': '-626254' },", "ho.find('a', {'class': 'bui-link'}) is not None : # result =", "the number of rooms to the booking request.', default=1, type=int,", "'sr-hotel__name'})) if ho.find('span', {'class': 
'sr-hotel__name'}) is not None: name =", "[d['Pais'] for d in localidades if args.dest_id in d['dest_id']] if", "prco-inline-block-maker-helper'}) is not None: price = ho.find('div', {'class': 'bui-price-display__value prco-inline-block-maker-helper'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\")", ": ' + price) def prep_data(rooms=1, country='Macedonia', dest_id='-1', DayIni='01/01/2019', DayFim='02/01/2019',", "args.dest_id in d['dest_id']] if len(countryAux)>0: country = countryAux[0] print('Parametros') print(args.rooms,", "DayIni='01/01/2019', DayFim='02/01/2019', out_format=None): ''' Prepare data for saving :return: hotels:", "''' writer = FileWriter(data, out_format, country) file = writer.output_file() print('All", "in file :return: hotels-in-macedonia.{txt/csv/xlsx} file ''' print('Procurando por',country) hotels_list =", "mesFinal = str(int(DayFim[3:5])) anoFinal = str(int(DayFim[6:10])) ''' Make request to", "''' Prepare data for saving :return: hotels: set() ''' offset", "for i in range(int(all_offset)): offset += 1 t = BookingThread(session,", "'-2601889' }, { 'Pais': 'Utrecht', 'dest_id': '-2154382' }, { 'Pais':", "#print(\"ho.find('a', {'class': 'jq_tooltip'})\") #print(ho.find('a', {'class': 'jq_tooltip'})) #name = ho.find('a', {'class':", "DayFim): parsed_html = get_booking_page(session, offset, rooms, country, dest_id,DayIni, DayFim) hotel", "distance = 'aaaaa' + str(len(result)) # else: # distance =", ":return: hotels-in-macedonia.{txt/csv/xlsx} file ''' print('Procurando por',country) hotels_list = prep_data(rooms, country,dest_id,", "'Tooltip'})] # print('TAMANHO TOOLTIP', str(len(result))) # for i in result:", "= BookingThread(session, offset, rooms, country,dest_id,DayIni, DayFim, process_hotels) threads.append(t) for t", "'----' # else: # distance = '----' # if len(result)", "file_writer import FileWriter hotels = [] def get_countries(): with open(\"europa2020.txt\",", "= ChoicesCompleter(countries) 
parser.add_argument(\"--dest_id\", help='Add the country to the booking request.',", "hotels list :param out_format: json, csv or excel :return: '''", "to the booking request.', default=1, type=int, nargs='?') parser.add_argument(\"--country\", help='Add the", "' + price) def prep_data(rooms=1, country='Macedonia', dest_id='-1', DayIni='01/01/2019', DayFim='02/01/2019', out_format=None):", ": 'Tooltip'})] # print('TAMANHO TOOLTIP', str(len(result))) # for i in", "print(session, offset, rooms, country, dest_id, DayIni, DayFim) diaInicial = str(int(DayIni[0:2]))", "is the straight-line distance on the map. Actual travel distance", "# else: # distance = '---' hotels.append(DayIni+';'+DayFim+';'+name + ';' +", "if len(countryAux)>0: country = countryAux[0] print('Parametros') print(args.rooms, country,args.dest_id,args.DayIni,args.DayFim, args.out_format) get_data(args.rooms,", "# distance = 'aaaaa' + str(len(result)) # else: # distance", "ho.find('div', {'class': 'bui-price-display__value prco-inline-block-maker-helper'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\") else: price = '-1' if ho.find('span',", "dest_id='-1',DayIni='01/01/2019',DayFim='02/01/2019', out_format=None): ''' Get all accomodations in Macedonia and save", "BeautifulSoup from file_writer import FileWriter hotels = [] def get_countries():", "= [] def get_countries(): with open(\"europa2020.txt\", \"r\") as f: countries", "to airbnb page and parse html :param offset: :return: html", "# if len(result) ==1: # if result[0] in 'km': #", "price + ';' + nota + ';' + distance) #hotels.append(str(len(hotels)", "argcomplete.autocomplete(parser) args = parser.parse_args() localidades = [{ 'Pais': 'London', 'dest_id':", "';' + price + ';' + nota + ';' +", "'Pais': 'London', 'dest_id': '-2601889' }, { 'Pais': 'Utrecht', 'dest_id': '-2154382'", "country = countryAux[0] print('Parametros') print(args.rooms, country,args.dest_id,args.DayIni,args.DayFim, args.out_format) get_data(args.rooms, 
country,args.dest_id,args.DayIni,args.DayFim, args.out_format)", "else: name = '-1' if ho.find('div', {'class': 'bui-price-display__value prco-inline-block-maker-helper'}) is", "import EnvironCompleter import requests from bthread import BookingThread from bs4", "threads: t.join() hotels2 = hotels return hotels2 def get_data(rooms=1, country='Macedonia',", "hotel: #print(\"ho.find('a', {'class': 'jq_tooltip'})\") #print(ho.find('a', {'class': 'jq_tooltip'})) #name = ho.find('a',", "+ price) def prep_data(rooms=1, country='Macedonia', dest_id='-1', DayIni='01/01/2019', DayFim='02/01/2019', out_format=None): '''", "print('Utilize uma das seguintes localizações') for i in locais: print(i)", "nargs='?').completer = ChoicesCompleter(countries) parser.add_argument(\"--dest_id\", help='Add the country to the booking", "#print(ho.find('span', {'class': 'sr-hotel__name'})) if ho.find('span', {'class': 'sr-hotel__name'}) is not None:", "len(result) ==1: # if result[0] in 'km': # distance =", "distance = '----' # if len(result) ==1: # if result[0]", "{'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0)' ' Gecko/20100101", "offset, rooms, country,dest_id,DayIni, DayFim, process_hotels) threads.append(t) for t in threads:", "nargs='?') parser.add_argument(\"--out_format\", help='Add the format for the output file. 
Add", "= parsed_html.find_all('div', {'class': 'sr_item'}) for ho in hotel: #print(\"ho.find('a', {'class':", "get_countries() parser.add_argument(\"--rooms\", help='Add the number of rooms to the booking", "+ ';' + price + ';' + nota + ';'", "[str(item) for item in ho.find_all('span', attrs={'data-bui-component' : 'Tooltip'})] # print('TAMANHO", "= r.content print(url) parsed_html = BeautifulSoup(html, 'lxml') return parsed_html def", "parsed_html.find_all('div', {'class': 'sr_item'}) for ho in hotel: #print(\"ho.find('a', {'class': 'jq_tooltip'})\")", "'sr-hotel__name'}) is not None: name = str(ho.find('span', {'class': 'sr-hotel__name'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else:", "# print('TAMANHO TOOLTIP', str(len(result))) # for i in result: #", "country=country) def save_data(data, out_format, country): ''' Saves hotels list in", "result: # print(i) # for i in result: # if", "'km': # distance = str(i) # else: # distance =", "data: hotels list :param out_format: json, csv or excel :return:", "'bui-price-display__value prco-inline-block-maker-helper'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\") else: price = '-1' if ho.find('span', {'class': '_ky9opu0'})", "'file') if __name__ == \"__main__\": parser = argparse.ArgumentParser() countries =", "= FileWriter(data, out_format, country) file = writer.output_file() print('All accommodations are", "t.start() for t in threads: t.join() hotels2 = hotels return", "parser.add_argument(\"--rooms\", help='Add the number of rooms to the booking request.',", "and save them in file :return: hotels-in-macedonia.{txt/csv/xlsx} file ''' print('Procurando", "to the booking request.', default='Macedonia', nargs='?').completer = ChoicesCompleter(countries) parser.add_argument(\"--dest_id\", help='Add", "anoInicial = str(int(DayIni[6:10])) diaFinal = str(int(DayFim[0:2])) mesFinal = str(int(DayFim[3:5])) anoFinal", "in range(int(all_offset)): offset 
+= 1 t = BookingThread(session, offset, rooms,", "localidades = [{ 'Pais': 'London', 'dest_id': '-2601889' }, { 'Pais':", "parser.add_argument(\"--country\", help='Add the country to the booking request.', default='Macedonia', nargs='?').completer", "None: nota = str(ho.find('span', {'class': '_ky9opu0'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\")) else : nota =", "may vary.'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else : distance = '-1' # if ho.find('a',", "distance = '-1' # if ho.find('a', {'class': 'bui-link'}) is not", "country to the booking request.', default='Macedonia', nargs='?').completer = ChoicesCompleter(countries) parser.add_argument(\"--dest_id\",", "list in file :param data: hotels list :param out_format: json,", "default='json', choices=['json', 'excel', 'csv'], nargs='?').completer = EnvironCompleter argcomplete.autocomplete(parser) args =", "file. Add excel, json or csv.', default='json', choices=['json', 'excel', 'csv'],", "= '----' # if len(result) ==1: # if result[0] in", "is not None : # result = [str(item) for item", "requests.Session() parsed_html = get_booking_page(session, offset, rooms, country, dest_id, DayIni,DayFim) all_offset", "==1: # if result[0] in 'km': # distance = result", "EnvironCompleter argcomplete.autocomplete(parser) args = parser.parse_args() localidades = [{ 'Pais': 'London',", "= str(ho.find('span', {'class': 'sr-hotel__name'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else: name = '-1' if ho.find('div',", "from argcomplete.completers import ChoicesCompleter from argcomplete.completers import EnvironCompleter import requests", "{'class': '_ky9opu0'}) is not None: nota = str(ho.find('span', {'class': '_ky9opu0'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\"))", "'&section_offset=6&items_offset=18'.format(rooms=rooms, country=country.replace(' ', 
'+'),anoFinal=anoFinal,mesFinal=mesFinal,diaInicial=diaInicial,mesInicial=mesInicial,anoInicial=anoInicial,diaFinal=diaFinal,dest_id=dest_id) + str(offset) r = requests.get(url, headers=", "if len(result) ==1: # if result[0] in 'km': # distance", "for i in result: # if i in 'km': #", "page and parse html :param offset: :return: html page '''", "distance may vary.'}) is not None: distance = str(ho.find('span', {'title':", "for t in threads: t.start() for t in threads: t.join()", "DayIni, DayFim): print('get_booking_page(session, offset, rooms, country, dest_id, DayIni, DayFim):') print(session,", "1 session = requests.Session() parsed_html = get_booking_page(session, offset, rooms, country,", "{'title': 'This is the straight-line distance on the map. Actual", "''' Saves hotels list in file :param data: hotels list", "diaInicial = str(int(DayIni[0:2])) mesInicial = str(int(DayIni[3:5])) anoInicial = str(int(DayIni[6:10])) diaFinal", "on the map. Actual travel distance may vary.'}) is not", "import FileWriter hotels = [] def get_countries(): with open(\"europa2020.txt\", \"r\")", "dest_id, DayIni, DayFim):') print(session, offset, rooms, country, dest_id, DayIni, DayFim)", "offset, rooms, country, dest_id, DayIni, DayFim):') print(session, offset, rooms, country,", "' : ' + price) def prep_data(rooms=1, country='Macedonia', dest_id='-1', DayIni='01/01/2019',", "= [{ 'Pais': 'London', 'dest_id': '-2601889' }, { 'Pais': 'Utrecht',", "distance = result # else: # distance = 'aaaaa' +", "= str(int(DayFim[3:5])) anoFinal = str(int(DayFim[6:10])) ''' Make request to airbnb", "'dest_id': '-626254' }, { 'Pais': '', 'dest_id': '' }] countryAux", ":return: hotels: set() ''' offset = 1 session = requests.Session()", "Actual travel distance may vary.'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else : distance = '-1'", "+ ' : ' + price) def prep_data(rooms=1, country='Macedonia', dest_id='-1',", "{'class': 
'sr-hotel__name'})) if ho.find('span', {'class': 'sr-hotel__name'}) is not None: name", "in result: # if i in 'km': # distance =", "default=1, type=int, nargs='?') parser.add_argument(\"--country\", help='Add the country to the booking", "= countryAux[0] print('Parametros') print(args.rooms, country,args.dest_id,args.DayIni,args.DayFim, args.out_format) get_data(args.rooms, country,args.dest_id,args.DayIni,args.DayFim, args.out_format) else:", "ho.find_all('span', attrs={'data-bui-component' : 'Tooltip'})] # print('TAMANHO TOOLTIP', str(len(result))) # for", "in threads: t.start() for t in threads: t.join() hotels2 =", "parser = argparse.ArgumentParser() countries = get_countries() parser.add_argument(\"--rooms\", help='Add the number", "as f: countries = f.read().splitlines() return countries def get_booking_page(session, offset,", "ho.find('span', {'class': 'sr-hotel__name'}) is not None: name = str(ho.find('span', {'class':", "i in result: # print(i) # for i in result:", "excel :return: ''' writer = FileWriter(data, out_format, country) file =", "help='Add the country to the booking request.', default='0', nargs='?') parser.add_argument(\"--DayIni\",", "= 'Nao Identificado' locais = [d['Pais'] + ':' + d['dest_id']", "'-626254' }, { 'Pais': '', 'dest_id': '' }] countryAux =", "for t in threads: t.join() hotels2 = hotels return hotels2", "{'class': 'bui-price-display__value prco-inline-block-maker-helper'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\") else: price = '-1' if ho.find('span', {'class':", "'jq_tooltip'})) #name = ho.find('a', {'class': 'jq_tooltip'})['data-title'] print(\"ho.find('span', {'class': 'sr-hotel__name'})\") #print(ho.find('span',", "writer = FileWriter(data, out_format, country) file = writer.output_file() print('All accommodations", "vary.'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else : distance = '-1' # if ho.find('a', {'class':", "threads.append(t) for t in 
threads: t.start() for t in threads:", "args.out_format) else: country = 'Nao Identificado' locais = [d['Pais'] +", "prco-inline-block-maker-helper'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\") else: price = '-1' if ho.find('span', {'class': '_ky9opu0'}) is", "}, { 'Pais': '', 'dest_id': '' }] countryAux = [d['Pais']", "parser.parse_args() localidades = [{ 'Pais': 'London', 'dest_id': '-2601889' }, {", "json, csv or excel :return: ''' writer = FileWriter(data, out_format,", "'sr-hotel__name'})\") #print(ho.find('span', {'class': 'sr-hotel__name'})) if ho.find('span', {'class': 'sr-hotel__name'}) is not", "str(int(DayFim[3:5])) anoFinal = str(int(DayFim[6:10])) ''' Make request to airbnb page", "''' Make request to airbnb page and parse html :param", "+ ';' + distance) #hotels.append(str(len(hotels) + 1) + ' :", "+ ' : ' + name + ' : '", "''' url = 'https://www.airbnb.com.br/s/Londres/'\\ 'homes?refinement_paths%5B%5D=%2Fhomes&current_tab_id=home_tab&selected_tab_id=home_tab&source=mc_search_bar&search_type=unknown'\\ '&click_referer=t%3ASEE_ALL%7Csid%3A874f16ee-6196-4289-9717-17dec73e1e5c%7Cst%3AMAGAZINE_HOMES&screen_size=large&hide_dates_and_guests_filters=false'\\ '&ne_lat=51.80546533345978&ne_lng=0.4969575708007312&sw_lat=51.17528882051496&sw_lng=-0.8200285131836154&zoom=10&search_by_map=false&checkin={anoInicial}-{mesInicial}-{diaInicial}'\\ '&checkout={anoFinal}-{mesFinal}-{diaFinal}&adults={rooms}&property_type_id%5B%5D=1&property_type_id%5B%5D=43&property_type_id%5B%5D=47'\\ '&place_id=ChIJdd4hrwug2EcRmSrV3Vo6llI&room_types%5B%5D=Entire%20home%2Fapt'\\ '&section_offset=6&items_offset=18'.format(rooms=rooms,", "= str(int(DayIni[3:5])) anoInicial = str(int(DayIni[6:10])) diaFinal = str(int(DayFim[0:2])) mesFinal =", "if result[0] in 'km': # distance = result # else:", "'homes?refinement_paths%5B%5D=%2Fhomes&current_tab_id=home_tab&selected_tab_id=home_tab&source=mc_search_bar&search_type=unknown'\\ 
'&click_referer=t%3ASEE_ALL%7Csid%3A874f16ee-6196-4289-9717-17dec73e1e5c%7Cst%3AMAGAZINE_HOMES&screen_size=large&hide_dates_and_guests_filters=false'\\ '&ne_lat=51.80546533345978&ne_lng=0.4969575708007312&sw_lat=51.17528882051496&sw_lng=-0.8200285131836154&zoom=10&search_by_map=false&checkin={anoInicial}-{mesInicial}-{diaInicial}'\\ '&checkout={anoFinal}-{mesFinal}-{diaFinal}&adults={rooms}&property_type_id%5B%5D=1&property_type_id%5B%5D=43&property_type_id%5B%5D=47'\\ '&place_id=ChIJdd4hrwug2EcRmSrV3Vo6llI&room_types%5B%5D=Entire%20home%2Fapt'\\ '&section_offset=6&items_offset=18'.format(rooms=rooms, country=country.replace(' ', '+'),anoFinal=anoFinal,mesFinal=mesFinal,diaInicial=diaInicial,mesInicial=mesInicial,anoInicial=anoInicial,diaFinal=diaFinal,dest_id=dest_id) +", "t.join() hotels2 = hotels return hotels2 def get_data(rooms=1, country='Macedonia', dest_id='-1',DayIni='01/01/2019',DayFim='02/01/2019',", "are saved.') print('You can find them in', file, 'file') if", "the country to the booking request.', default='0', nargs='?') parser.add_argument(\"--DayIni\", help='Data", "inicial', default='02/01/2019', nargs='?') parser.add_argument(\"--out_format\", help='Add the format for the output", "\"r\") as f: countries = f.read().splitlines() return countries def get_booking_page(session,", "save them in file :return: hotels-in-macedonia.{txt/csv/xlsx} file ''' print('Procurando por',country)", "{'class': 'sr-hotel__name'}) is not None: name = str(ho.find('span', {'class': 'sr-hotel__name'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','')", "countries def get_booking_page(session, offset, rooms, country, dest_id, DayIni, DayFim): print('get_booking_page(session,", "}, { 'Pais': 'Utrecht', 'dest_id': '-2154382' }, { 'Pais': 'Buzios',", "else: country = 'Nao Identificado' locais = [d['Pais'] + ':'", "hotels.append(DayIni+';'+DayFim+';'+name + ';' + price + ';' + nota +", "python3.6 import argparse import 
argcomplete from argcomplete.completers import ChoicesCompleter from", "hotels: set() ''' offset = 1 session = requests.Session() parsed_html", "= '-1' if ho.find('span', {'title': 'This is the straight-line distance", "= 'https://www.airbnb.com.br/s/Londres/'\\ 'homes?refinement_paths%5B%5D=%2Fhomes&current_tab_id=home_tab&selected_tab_id=home_tab&source=mc_search_bar&search_type=unknown'\\ '&click_referer=t%3ASEE_ALL%7Csid%3A874f16ee-6196-4289-9717-17dec73e1e5c%7Cst%3AMAGAZINE_HOMES&screen_size=large&hide_dates_and_guests_filters=false'\\ '&ne_lat=51.80546533345978&ne_lng=0.4969575708007312&sw_lat=51.17528882051496&sw_lng=-0.8200285131836154&zoom=10&search_by_map=false&checkin={anoInicial}-{mesInicial}-{diaInicial}'\\ '&checkout={anoFinal}-{mesFinal}-{diaFinal}&adults={rooms}&property_type_id%5B%5D=1&property_type_id%5B%5D=43&property_type_id%5B%5D=47'\\ '&place_id=ChIJdd4hrwug2EcRmSrV3Vo6llI&room_types%5B%5D=Entire%20home%2Fapt'\\ '&section_offset=6&items_offset=18'.format(rooms=rooms, country=country.replace(' ',", "'bui-price-display__value prco-inline-block-maker-helper'}) is not None: price = ho.find('div', {'class': 'bui-price-display__value", "parsed_html.find_all('li', {'class': 'sr_pagination_item'})[-1].get_text().splitlines()[-1] threads = [] for i in range(int(all_offset)):", "the country to the booking request.', default='Macedonia', nargs='?').completer = ChoicesCompleter(countries)", "is not None: price = ho.find('div', {'class': 'bui-price-display__value prco-inline-block-maker-helper'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\") else:", "or excel :return: ''' writer = FileWriter(data, out_format, country) file", "save_data(hotels_list , out_format=out_format, country=country) def save_data(data, out_format, country): ''' Saves", "DayIni, DayFim):') print(session, offset, rooms, country, dest_id, DayIni, DayFim) diaInicial", "str(len(result)) # else: # distance = '---' hotels.append(DayIni+';'+DayFim+';'+name + ';'", "import 
requests from bthread import BookingThread from bs4 import BeautifulSoup", "country, dest_id, DayIni, DayFim): print('get_booking_page(session, offset, rooms, country, dest_id, DayIni,", "in', file, 'file') if __name__ == \"__main__\": parser = argparse.ArgumentParser()", "not None: nota = str(ho.find('span', {'class': '_ky9opu0'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\")) else : nota", "= str(int(DayFim[6:10])) ''' Make request to airbnb page and parse", "len(countryAux)>0: country = countryAux[0] print('Parametros') print(args.rooms, country,args.dest_id,args.DayIni,args.DayFim, args.out_format) get_data(args.rooms, country,args.dest_id,args.DayIni,args.DayFim,", "name + ' : ' + price) def prep_data(rooms=1, country='Macedonia',", "vary.'}) is not None: distance = str(ho.find('span', {'title': 'This is", "request.', default='Macedonia', nargs='?').completer = ChoicesCompleter(countries) parser.add_argument(\"--dest_id\", help='Add the country to", "can find them in', file, 'file') if __name__ == \"__main__\":", "default='Macedonia', nargs='?').completer = ChoicesCompleter(countries) parser.add_argument(\"--dest_id\", help='Add the country to the", "'&ne_lat=51.80546533345978&ne_lng=0.4969575708007312&sw_lat=51.17528882051496&sw_lng=-0.8200285131836154&zoom=10&search_by_map=false&checkin={anoInicial}-{mesInicial}-{diaInicial}'\\ '&checkout={anoFinal}-{mesFinal}-{diaFinal}&adults={rooms}&property_type_id%5B%5D=1&property_type_id%5B%5D=43&property_type_id%5B%5D=47'\\ '&place_id=ChIJdd4hrwug2EcRmSrV3Vo6llI&room_types%5B%5D=Entire%20home%2Fapt'\\ '&section_offset=6&items_offset=18'.format(rooms=rooms, country=country.replace(' ', '+'),anoFinal=anoFinal,mesFinal=mesFinal,diaInicial=diaInicial,mesInicial=mesInicial,anoInicial=anoInicial,diaFinal=diaFinal,dest_id=dest_id) + str(offset) r", "'Nao Identificado' locais = [d['Pais'] + ':' + d['dest_id'] for", "for ho in hotel: #print(\"ho.find('a', {'class': 'jq_tooltip'})\") #print(ho.find('a', 
{'class': 'jq_tooltip'}))", "not None : # result = [str(item) for item in", "result # else: # distance = 'aaaaa' + str(len(result)) #", "from bs4 import BeautifulSoup from file_writer import FileWriter hotels =", "locais = [d['Pais'] + ':' + d['dest_id'] for d in", "default='0', nargs='?') parser.add_argument(\"--DayIni\", help='Data inicial', default='01/01/2019', nargs='?') parser.add_argument(\"--DayFim\", help='Data inicial',", "writer.output_file() print('All accommodations are saved.') print('You can find them in',", "country,dest_id, DayIni, DayFim, out_format) save_data(hotels_list , out_format=out_format, country=country) def save_data(data,", "nargs='?') parser.add_argument(\"--DayFim\", help='Data inicial', default='02/01/2019', nargs='?') parser.add_argument(\"--out_format\", help='Add the format", "nota = '-1' if ho.find('span', {'title': 'This is the straight-line", "rooms, country, dest_id,DayIni, DayFim) hotel = parsed_html.find_all('div', {'class': 'sr_item'}) for", "'Buzios', 'dest_id': '-626254' }, { 'Pais': '', 'dest_id': '' }]", "offset: :return: html page ''' url = 'https://www.airbnb.com.br/s/Londres/'\\ 'homes?refinement_paths%5B%5D=%2Fhomes&current_tab_id=home_tab&selected_tab_id=home_tab&source=mc_search_bar&search_type=unknown'\\ '&click_referer=t%3ASEE_ALL%7Csid%3A874f16ee-6196-4289-9717-17dec73e1e5c%7Cst%3AMAGAZINE_HOMES&screen_size=large&hide_dates_and_guests_filters=false'\\", "ChoicesCompleter from argcomplete.completers import EnvironCompleter import requests from bthread import", "help='Data inicial', default='02/01/2019', nargs='?') parser.add_argument(\"--out_format\", help='Add the format for the", "hotels2 def get_data(rooms=1, country='Macedonia', dest_id='-1',DayIni='01/01/2019',DayFim='02/01/2019', out_format=None): ''' Get all accomodations", "if __name__ == \"__main__\": parser = argparse.ArgumentParser() countries = get_countries()", "ho.find('a', {'class': 'jq_tooltip'})['data-title'] print(\"ho.find('span', {'class': 
'sr-hotel__name'})\") #print(ho.find('span', {'class': 'sr-hotel__name'})) if", "all accomodations in Macedonia and save them in file :return:", "parsed_html = BeautifulSoup(html, 'lxml') return parsed_html def process_hotels(session, offset, rooms,", "else: # distance = '----' # if len(result) ==1: #", "i in result: # if i in 'km': # distance", "'excel', 'csv'], nargs='?').completer = EnvironCompleter argcomplete.autocomplete(parser) args = parser.parse_args() localidades", "import ChoicesCompleter from argcomplete.completers import EnvironCompleter import requests from bthread", "+ d['dest_id'] for d in localidades if d['Pais'] != '']", "# else: # distance = '----' # if len(result) ==1:", "threads: t.start() for t in threads: t.join() hotels2 = hotels", "for saving :return: hotels: set() ''' offset = 1 session", "', '+'),anoFinal=anoFinal,mesFinal=mesFinal,diaInicial=diaInicial,mesInicial=mesInicial,anoInicial=anoInicial,diaFinal=diaFinal,dest_id=dest_id) + str(offset) r = requests.get(url, headers= {'User-Agent': 'Mozilla/5.0", "DayFim, out_format) save_data(hotels_list , out_format=out_format, country=country) def save_data(data, out_format, country):", "';' + nota + ';' + distance) #hotels.append(str(len(hotels) + 1)", "+ str(len(result)) # else: # distance = '---' hotels.append(DayIni+';'+DayFim+';'+name +", ":return: html page ''' url = 'https://www.airbnb.com.br/s/Londres/'\\ 'homes?refinement_paths%5B%5D=%2Fhomes&current_tab_id=home_tab&selected_tab_id=home_tab&source=mc_search_bar&search_type=unknown'\\ '&click_referer=t%3ASEE_ALL%7Csid%3A874f16ee-6196-4289-9717-17dec73e1e5c%7Cst%3AMAGAZINE_HOMES&screen_size=large&hide_dates_and_guests_filters=false'\\ '&ne_lat=51.80546533345978&ne_lng=0.4969575708007312&sw_lat=51.17528882051496&sw_lng=-0.8200285131836154&zoom=10&search_by_map=false&checkin={anoInicial}-{mesInicial}-{diaInicial}'\\", "default='01/01/2019', nargs='?') parser.add_argument(\"--DayFim\", help='Data inicial', default='02/01/2019', nargs='?') 
parser.add_argument(\"--out_format\", help='Add the", "'' }] countryAux = [d['Pais'] for d in localidades if", "{ 'Pais': 'Buzios', 'dest_id': '-626254' }, { 'Pais': '', 'dest_id':", "else: price = '-1' if ho.find('span', {'class': '_ky9opu0'}) is not", "map. Actual travel distance may vary.'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else : distance =", "str(int(DayFim[6:10])) ''' Make request to airbnb page and parse html", "str(int(DayIni[0:2])) mesInicial = str(int(DayIni[3:5])) anoInicial = str(int(DayIni[6:10])) diaFinal = str(int(DayFim[0:2]))", "distance = str(ho.find('span', {'title': 'This is the straight-line distance on", "distance = str(i) # else: # distance = '----' #", "list :param out_format: json, csv or excel :return: ''' writer", "nargs='?') parser.add_argument(\"--country\", help='Add the country to the booking request.', default='Macedonia',", "help='Add the country to the booking request.', default='Macedonia', nargs='?').completer =", "{ 'Pais': 'Utrecht', 'dest_id': '-2154382' }, { 'Pais': 'Buzios', 'dest_id':", "if ho.find('div', {'class': 'bui-price-display__value prco-inline-block-maker-helper'}) is not None: price =", "Make request to airbnb page and parse html :param offset:", "= '-1' # if ho.find('a', {'class': 'bui-link'}) is not None", "# distance = '---' hotels.append(DayIni+';'+DayFim+';'+name + ';' + price +", "EnvironCompleter import requests from bthread import BookingThread from bs4 import", "DayFim, process_hotels) threads.append(t) for t in threads: t.start() for t", "DayFim='02/01/2019', out_format=None): ''' Prepare data for saving :return: hotels: set()", "threads = [] for i in range(int(all_offset)): offset += 1", "DayFim):') print(session, offset, rooms, country, dest_id, DayIni, DayFim) diaInicial =", "= get_booking_page(session, offset, rooms, country, dest_id,DayIni, DayFim) hotel = parsed_html.find_all('div',", "country,dest_id,DayIni, DayFim, 
process_hotels) threads.append(t) for t in threads: t.start() for", "in file :param data: hotels list :param out_format: json, csv", "localidades if args.dest_id in d['dest_id']] if len(countryAux)>0: country = countryAux[0]", "= str(int(DayIni[6:10])) diaFinal = str(int(DayFim[0:2])) mesFinal = str(int(DayFim[3:5])) anoFinal =", "the output file. Add excel, json or csv.', default='json', choices=['json',", "help='Add the number of rooms to the booking request.', default=1,", "out_format: json, csv or excel :return: ''' writer = FileWriter(data,", "'-2154382' }, { 'Pais': 'Buzios', 'dest_id': '-626254' }, { 'Pais':", "== \"__main__\": parser = argparse.ArgumentParser() countries = get_countries() parser.add_argument(\"--rooms\", help='Add", "'bui-link'}) is not None : # result = [str(item) for", "# result = [str(item) for item in ho.find_all('span', attrs={'data-bui-component' :", "in localidades if d['Pais'] != ''] print('----------') print('Utilize uma das", "= get_booking_page(session, offset, rooms, country, dest_id, DayIni,DayFim) all_offset = parsed_html.find_all('li',", "'----' # if len(result) ==1: # if result[0] in 'km':", "anoFinal = str(int(DayFim[6:10])) ''' Make request to airbnb page and", "straight-line distance on the map. 
Actual travel distance may vary.'})", "nargs='?') parser.add_argument(\"--DayIni\", help='Data inicial', default='01/01/2019', nargs='?') parser.add_argument(\"--DayFim\", help='Data inicial', default='02/01/2019',", "file = writer.output_file() print('All accommodations are saved.') print('You can find", "= str(ho.find('span', {'title': 'This is the straight-line distance on the", "if ho.find('span', {'class': '_ky9opu0'}) is not None: nota = str(ho.find('span',", "else: # distance = 'aaaaa' + str(len(result)) # else: #", "FileWriter hotels = [] def get_countries(): with open(\"europa2020.txt\", \"r\") as", "distance = '----' # else: # distance = '----' #", "+ name + ' : ' + price) def prep_data(rooms=1,", "import argcomplete from argcomplete.completers import ChoicesCompleter from argcomplete.completers import EnvironCompleter", "return countries def get_booking_page(session, offset, rooms, country, dest_id, DayIni, DayFim):", "''' print('Procurando por',country) hotels_list = prep_data(rooms, country,dest_id, DayIni, DayFim, out_format)", "parser.add_argument(\"--DayFim\", help='Data inicial', default='02/01/2019', nargs='?') parser.add_argument(\"--out_format\", help='Add the format for", "'Pais': 'Buzios', 'dest_id': '-626254' }, { 'Pais': '', 'dest_id': ''", "'dest_id': '' }] countryAux = [d['Pais'] for d in localidades", "get_data(rooms=1, country='Macedonia', dest_id='-1',DayIni='01/01/2019',DayFim='02/01/2019', out_format=None): ''' Get all accomodations in Macedonia", "[] def get_countries(): with open(\"europa2020.txt\", \"r\") as f: countries =", "= parsed_html.find_all('li', {'class': 'sr_pagination_item'})[-1].get_text().splitlines()[-1] threads = [] for i in", "else: # distance = '----' # else: # distance =", "DayIni, DayFim, out_format) save_data(hotels_list , out_format=out_format, country=country) def save_data(data, out_format,", "'Pais': '', 'dest_id': '' }] countryAux = [d['Pais'] for d", "else : nota = '-1' if ho.find('span', {'title': 'This 
is", "countryAux = [d['Pais'] for d in localidades if args.dest_id in", "= [] for i in range(int(all_offset)): offset += 1 t", "print('get_booking_page(session, offset, rooms, country, dest_id, DayIni, DayFim):') print(session, offset, rooms,", "dest_id, DayIni, DayFim): parsed_html = get_booking_page(session, offset, rooms, country, dest_id,DayIni,", "argcomplete from argcomplete.completers import ChoicesCompleter from argcomplete.completers import EnvironCompleter import", "is not None: name = str(ho.find('span', {'class': 'sr-hotel__name'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else: name", "return parsed_html def process_hotels(session, offset, rooms, country, dest_id, DayIni, DayFim):", "print('All accommodations are saved.') print('You can find them in', file,", ": ' + name + ' : ' + price)", "from file_writer import FileWriter hotels = [] def get_countries(): with", "file :return: hotels-in-macedonia.{txt/csv/xlsx} file ''' print('Procurando por',country) hotels_list = prep_data(rooms,", "nargs='?').completer = EnvironCompleter argcomplete.autocomplete(parser) args = parser.parse_args() localidades = [{", "d in localidades if args.dest_id in d['dest_id']] if len(countryAux)>0: country", "<gh_stars>1-10 #! 
/usr/bin/env python3.6 import argparse import argcomplete from argcomplete.completers", "'jq_tooltip'})\") #print(ho.find('a', {'class': 'jq_tooltip'})) #name = ho.find('a', {'class': 'jq_tooltip'})['data-title'] print(\"ho.find('span',", "the booking request.', default='0', nargs='?') parser.add_argument(\"--DayIni\", help='Data inicial', default='01/01/2019', nargs='?')", "country,args.dest_id,args.DayIni,args.DayFim, args.out_format) get_data(args.rooms, country,args.dest_id,args.DayIni,args.DayFim, args.out_format) else: country = 'Nao Identificado'", "return hotels2 def get_data(rooms=1, country='Macedonia', dest_id='-1',DayIni='01/01/2019',DayFim='02/01/2019', out_format=None): ''' Get all", "file :param data: hotels list :param out_format: json, csv or", "[d['Pais'] + ':' + d['dest_id'] for d in localidades if", "hotel = parsed_html.find_all('div', {'class': 'sr_item'}) for ho in hotel: #print(\"ho.find('a',", "+= 1 t = BookingThread(session, offset, rooms, country,dest_id,DayIni, DayFim, process_hotels)", "parser.add_argument(\"--out_format\", help='Add the format for the output file. 
Add excel,", "' Gecko/20100101 Firefox/48.0'}) html = r.content print(url) parsed_html = BeautifulSoup(html,", "FileWriter(data, out_format, country) file = writer.output_file() print('All accommodations are saved.')", "get_data(args.rooms, country,args.dest_id,args.DayIni,args.DayFim, args.out_format) else: country = 'Nao Identificado' locais =", "uma das seguintes localizações') for i in locais: print(i) print('----------')", "Firefox/48.0'}) html = r.content print(url) parsed_html = BeautifulSoup(html, 'lxml') return", "None: distance = str(ho.find('span', {'title': 'This is the straight-line distance", "'csv'], nargs='?').completer = EnvironCompleter argcomplete.autocomplete(parser) args = parser.parse_args() localidades =", "= ho.find('div', {'class': 'bui-price-display__value prco-inline-block-maker-helper'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\") else: price = '-1' if", "for i in result: # print(i) # for i in", ":param offset: :return: html page ''' url = 'https://www.airbnb.com.br/s/Londres/'\\ 'homes?refinement_paths%5B%5D=%2Fhomes&current_tab_id=home_tab&selected_tab_id=home_tab&source=mc_search_bar&search_type=unknown'\\", "def get_countries(): with open(\"europa2020.txt\", \"r\") as f: countries = f.read().splitlines()", "default='02/01/2019', nargs='?') parser.add_argument(\"--out_format\", help='Add the format for the output file.", "Win64; x64; rv:47.0)' ' Gecko/20100101 Firefox/48.0'}) html = r.content print(url)", "= str(i) # else: # distance = '----' # else:", "#print(ho.find('a', {'class': 'jq_tooltip'})) #name = ho.find('a', {'class': 'jq_tooltip'})['data-title'] print(\"ho.find('span', {'class':", "= EnvironCompleter argcomplete.autocomplete(parser) args = parser.parse_args() localidades = [{ 'Pais':", "t in threads: t.start() for t in threads: t.join() hotels2", "hotels-in-macedonia.{txt/csv/xlsx} file ''' print('Procurando por',country) hotels_list = prep_data(rooms, country,dest_id, DayIni,", "requests from bthread 
import BookingThread from bs4 import BeautifulSoup from", "parsed_html = get_booking_page(session, offset, rooms, country, dest_id,DayIni, DayFim) hotel =", "straight-line distance on the map. Actual travel distance may vary.'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','')", "}, { 'Pais': 'Buzios', 'dest_id': '-626254' }, { 'Pais': '',", "countryAux[0] print('Parametros') print(args.rooms, country,args.dest_id,args.DayIni,args.DayFim, args.out_format) get_data(args.rooms, country,args.dest_id,args.DayIni,args.DayFim, args.out_format) else: country", "result = [str(item) for item in ho.find_all('span', attrs={'data-bui-component' : 'Tooltip'})]", "def prep_data(rooms=1, country='Macedonia', dest_id='-1', DayIni='01/01/2019', DayFim='02/01/2019', out_format=None): ''' Prepare data", "!= ''] print('----------') print('Utilize uma das seguintes localizações') for i", "ho.find('span', {'title': 'This is the straight-line distance on the map.", "str(int(DayIni[3:5])) anoInicial = str(int(DayIni[6:10])) diaFinal = str(int(DayFim[0:2])) mesFinal = str(int(DayFim[3:5]))", "result: # if i in 'km': # distance = str(i)", "mesInicial = str(int(DayIni[3:5])) anoInicial = str(int(DayIni[6:10])) diaFinal = str(int(DayFim[0:2])) mesFinal", "help='Data inicial', default='01/01/2019', nargs='?') parser.add_argument(\"--DayFim\", help='Data inicial', default='02/01/2019', nargs='?') parser.add_argument(\"--out_format\",", "if ho.find('span', {'class': 'sr-hotel__name'}) is not None: name = str(ho.find('span',", "if args.dest_id in d['dest_id']] if len(countryAux)>0: country = countryAux[0] print('Parametros')", "the booking request.', default='Macedonia', nargs='?').completer = ChoicesCompleter(countries) parser.add_argument(\"--dest_id\", help='Add the", "parser.add_argument(\"--DayIni\", help='Data inicial', default='01/01/2019', nargs='?') parser.add_argument(\"--DayFim\", help='Data inicial', default='02/01/2019', 
nargs='?')", "'dest_id': '-2601889' }, { 'Pais': 'Utrecht', 'dest_id': '-2154382' }, {", "inicial', default='01/01/2019', nargs='?') parser.add_argument(\"--DayFim\", help='Data inicial', default='02/01/2019', nargs='?') parser.add_argument(\"--out_format\", help='Add", "rooms, country, dest_id, DayIni, DayFim):') print(session, offset, rooms, country, dest_id,", "not None: price = ho.find('div', {'class': 'bui-price-display__value prco-inline-block-maker-helper'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\") else: price", "= 1 session = requests.Session() parsed_html = get_booking_page(session, offset, rooms,", "country, dest_id, DayIni, DayFim):') print(session, offset, rooms, country, dest_id, DayIni,", "{'class': '_ky9opu0'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\")) else : nota = '-1' if ho.find('span', {'title':", "str(int(DayFim[0:2])) mesFinal = str(int(DayFim[3:5])) anoFinal = str(int(DayFim[6:10])) ''' Make request", "name = '-1' if ho.find('div', {'class': 'bui-price-display__value prco-inline-block-maker-helper'}) is not", "str(ho.find('span', {'class': 'sr-hotel__name'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else: name = '-1' if ho.find('div', {'class':", "ho.find('div', {'class': 'bui-price-display__value prco-inline-block-maker-helper'}) is not None: price = ho.find('div',", "'dest_id': '-2154382' }, { 'Pais': 'Buzios', 'dest_id': '-626254' }, {", "# if result[0] in 'km': # distance = result #", "else: # distance = '---' hotels.append(DayIni+';'+DayFim+';'+name + ';' + price", "the map. 
Actual travel distance may vary.'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else : distance", "' + name + ' : ' + price) def", "= [str(item) for item in ho.find_all('span', attrs={'data-bui-component' : 'Tooltip'})] #", "request.', default=1, type=int, nargs='?') parser.add_argument(\"--country\", help='Add the country to the", "the map. Actual travel distance may vary.'}) is not None:", "the format for the output file. Add excel, json or", "rooms to the booking request.', default=1, type=int, nargs='?') parser.add_argument(\"--country\", help='Add", "out_format=None): ''' Prepare data for saving :return: hotels: set() '''", "offset, rooms, country, dest_id, DayIni, DayFim): parsed_html = get_booking_page(session, offset,", "if ho.find('a', {'class': 'bui-link'}) is not None : # result", "= result # else: # distance = 'aaaaa' + str(len(result))", "= str(int(DayFim[0:2])) mesFinal = str(int(DayFim[3:5])) anoFinal = str(int(DayFim[6:10])) ''' Make", "# for i in result: # if i in 'km':", "print(i) # for i in result: # if i in", "d['dest_id'] for d in localidades if d['Pais'] != ''] print('----------')", "# distance = '----' # if len(result) ==1: # if", "url = 'https://www.airbnb.com.br/s/Londres/'\\ 'homes?refinement_paths%5B%5D=%2Fhomes&current_tab_id=home_tab&selected_tab_id=home_tab&source=mc_search_bar&search_type=unknown'\\ '&click_referer=t%3ASEE_ALL%7Csid%3A874f16ee-6196-4289-9717-17dec73e1e5c%7Cst%3AMAGAZINE_HOMES&screen_size=large&hide_dates_and_guests_filters=false'\\ '&ne_lat=51.80546533345978&ne_lng=0.4969575708007312&sw_lat=51.17528882051496&sw_lng=-0.8200285131836154&zoom=10&search_by_map=false&checkin={anoInicial}-{mesInicial}-{diaInicial}'\\ '&checkout={anoFinal}-{mesFinal}-{diaFinal}&adults={rooms}&property_type_id%5B%5D=1&property_type_id%5B%5D=43&property_type_id%5B%5D=47'\\ '&place_id=ChIJdd4hrwug2EcRmSrV3Vo6llI&room_types%5B%5D=Entire%20home%2Fapt'\\ 
'&section_offset=6&items_offset=18'.format(rooms=rooms, country=country.replace('", "get_booking_page(session, offset, rooms, country, dest_id,DayIni, DayFim) hotel = parsed_html.find_all('div', {'class':", "request.', default='0', nargs='?') parser.add_argument(\"--DayIni\", help='Data inicial', default='01/01/2019', nargs='?') parser.add_argument(\"--DayFim\", help='Data", "result[0] in 'km': # distance = result # else: #", "price = ho.find('div', {'class': 'bui-price-display__value prco-inline-block-maker-helper'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\") else: price = '-1'", "= '-1' if ho.find('span', {'class': '_ky9opu0'}) is not None: nota", "out_format, country): ''' Saves hotels list in file :param data:", "not None: distance = str(ho.find('span', {'title': 'This is the straight-line", "NT 6.1; Win64; x64; rv:47.0)' ' Gecko/20100101 Firefox/48.0'}) html =", "= ho.find('a', {'class': 'jq_tooltip'})['data-title'] print(\"ho.find('span', {'class': 'sr-hotel__name'})\") #print(ho.find('span', {'class': 'sr-hotel__name'}))", "hotels return hotels2 def get_data(rooms=1, country='Macedonia', dest_id='-1',DayIni='01/01/2019',DayFim='02/01/2019', out_format=None): ''' Get", "print(url) parsed_html = BeautifulSoup(html, 'lxml') return parsed_html def process_hotels(session, offset,", "'---' hotels.append(DayIni+';'+DayFim+';'+name + ';' + price + ';' + nota", "saving :return: hotels: set() ''' offset = 1 session =", "rooms, country,dest_id,DayIni, DayFim, process_hotels) threads.append(t) for t in threads: t.start()", "nota + ';' + distance) #hotels.append(str(len(hotels) + 1) + '", "'This is the straight-line distance on the map. 
Actual travel", "if i in 'km': # distance = str(i) # else:", "1) + ' : ' + name + ' :", "'-1' # if ho.find('a', {'class': 'bui-link'}) is not None :", "# if i in 'km': # distance = str(i) #", "DayIni, DayFim) diaInicial = str(int(DayIni[0:2])) mesInicial = str(int(DayIni[3:5])) anoInicial =", ": # result = [str(item) for item in ho.find_all('span', attrs={'data-bui-component'", "'London', 'dest_id': '-2601889' }, { 'Pais': 'Utrecht', 'dest_id': '-2154382' },", "BookingThread from bs4 import BeautifulSoup from file_writer import FileWriter hotels", "def get_data(rooms=1, country='Macedonia', dest_id='-1',DayIni='01/01/2019',DayFim='02/01/2019', out_format=None): ''' Get all accomodations in", "'Pais': 'Utrecht', 'dest_id': '-2154382' }, { 'Pais': 'Buzios', 'dest_id': '-626254'", "page ''' url = 'https://www.airbnb.com.br/s/Londres/'\\ 'homes?refinement_paths%5B%5D=%2Fhomes&current_tab_id=home_tab&selected_tab_id=home_tab&source=mc_search_bar&search_type=unknown'\\ '&click_referer=t%3ASEE_ALL%7Csid%3A874f16ee-6196-4289-9717-17dec73e1e5c%7Cst%3AMAGAZINE_HOMES&screen_size=large&hide_dates_and_guests_filters=false'\\ '&ne_lat=51.80546533345978&ne_lng=0.4969575708007312&sw_lat=51.17528882051496&sw_lng=-0.8200285131836154&zoom=10&search_by_map=false&checkin={anoInicial}-{mesInicial}-{diaInicial}'\\ '&checkout={anoFinal}-{mesFinal}-{diaFinal}&adults={rooms}&property_type_id%5B%5D=1&property_type_id%5B%5D=43&property_type_id%5B%5D=47'\\ '&place_id=ChIJdd4hrwug2EcRmSrV3Vo6llI&room_types%5B%5D=Entire%20home%2Fapt'\\", "print('TAMANHO TOOLTIP', str(len(result))) # for i in result: # print(i)", "distance on the map. 
Actual travel distance may vary.'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else", "TOOLTIP', str(len(result))) # for i in result: # print(i) #", "ho in hotel: #print(\"ho.find('a', {'class': 'jq_tooltip'})\") #print(ho.find('a', {'class': 'jq_tooltip'})) #name", "rv:47.0)' ' Gecko/20100101 Firefox/48.0'}) html = r.content print(url) parsed_html =", "data for saving :return: hotels: set() ''' offset = 1", "distance = '---' hotels.append(DayIni+';'+DayFim+';'+name + ';' + price + ';'", "format for the output file. Add excel, json or csv.',", "'-1' if ho.find('span', {'class': '_ky9opu0'}) is not None: nota =", "args.out_format) get_data(args.rooms, country,args.dest_id,args.DayIni,args.DayFim, args.out_format) else: country = 'Nao Identificado' locais", "Add excel, json or csv.', default='json', choices=['json', 'excel', 'csv'], nargs='?').completer", "csv.', default='json', choices=['json', 'excel', 'csv'], nargs='?').completer = EnvironCompleter argcomplete.autocomplete(parser) args", "offset, rooms, country, dest_id,DayIni, DayFim) hotel = parsed_html.find_all('div', {'class': 'sr_item'})", "str(ho.find('span', {'title': 'This is the straight-line distance on the map.", "countries = f.read().splitlines() return countries def get_booking_page(session, offset, rooms, country,", "parse html :param offset: :return: html page ''' url =", "'_ky9opu0'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\")) else : nota = '-1' if ho.find('span', {'title': 'This", "request to airbnb page and parse html :param offset: :return:", "open(\"europa2020.txt\", \"r\") as f: countries = f.read().splitlines() return countries def", "#hotels.append(str(len(hotels) + 1) + ' : ' + name +", "argparse import argcomplete from argcomplete.completers import ChoicesCompleter from argcomplete.completers import", "offset = 1 session = requests.Session() parsed_html = get_booking_page(session, offset,", "file, 'file') if 
__name__ == \"__main__\": parser = argparse.ArgumentParser() countries", "DayIni,DayFim) all_offset = parsed_html.find_all('li', {'class': 'sr_pagination_item'})[-1].get_text().splitlines()[-1] threads = [] for", "'', 'dest_id': '' }] countryAux = [d['Pais'] for d in", "for d in localidades if args.dest_id in d['dest_id']] if len(countryAux)>0:", "import BookingThread from bs4 import BeautifulSoup from file_writer import FileWriter", "= parser.parse_args() localidades = [{ 'Pais': 'London', 'dest_id': '-2601889' },", "Get all accomodations in Macedonia and save them in file", "session = requests.Session() parsed_html = get_booking_page(session, offset, rooms, country, dest_id,", "country='Macedonia', dest_id='-1',DayIni='01/01/2019',DayFim='02/01/2019', out_format=None): ''' Get all accomodations in Macedonia and", "in 'km': # distance = result # else: # distance", "print('Parametros') print(args.rooms, country,args.dest_id,args.DayIni,args.DayFim, args.out_format) get_data(args.rooms, country,args.dest_id,args.DayIni,args.DayFim, args.out_format) else: country =", "'https://www.airbnb.com.br/s/Londres/'\\ 'homes?refinement_paths%5B%5D=%2Fhomes&current_tab_id=home_tab&selected_tab_id=home_tab&source=mc_search_bar&search_type=unknown'\\ '&click_referer=t%3ASEE_ALL%7Csid%3A874f16ee-6196-4289-9717-17dec73e1e5c%7Cst%3AMAGAZINE_HOMES&screen_size=large&hide_dates_and_guests_filters=false'\\ '&ne_lat=51.80546533345978&ne_lng=0.4969575708007312&sw_lat=51.17528882051496&sw_lng=-0.8200285131836154&zoom=10&search_by_map=false&checkin={anoInicial}-{mesInicial}-{diaInicial}'\\ '&checkout={anoFinal}-{mesFinal}-{diaFinal}&adults={rooms}&property_type_id%5B%5D=1&property_type_id%5B%5D=43&property_type_id%5B%5D=47'\\ '&place_id=ChIJdd4hrwug2EcRmSrV3Vo6llI&room_types%5B%5D=Entire%20home%2Fapt'\\ '&section_offset=6&items_offset=18'.format(rooms=rooms, country=country.replace(' ', 
'+'),anoFinal=anoFinal,mesFinal=mesFinal,diaInicial=diaInicial,mesInicial=mesInicial,anoInicial=anoInicial,diaFinal=diaFinal,dest_id=dest_id)", "# for i in result: # print(i) # for i", "= requests.get(url, headers= {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64;", "html page ''' url = 'https://www.airbnb.com.br/s/Londres/'\\ 'homes?refinement_paths%5B%5D=%2Fhomes&current_tab_id=home_tab&selected_tab_id=home_tab&source=mc_search_bar&search_type=unknown'\\ '&click_referer=t%3ASEE_ALL%7Csid%3A874f16ee-6196-4289-9717-17dec73e1e5c%7Cst%3AMAGAZINE_HOMES&screen_size=large&hide_dates_and_guests_filters=false'\\ '&ne_lat=51.80546533345978&ne_lng=0.4969575708007312&sw_lat=51.17528882051496&sw_lng=-0.8200285131836154&zoom=10&search_by_map=false&checkin={anoInicial}-{mesInicial}-{diaInicial}'\\ '&checkout={anoFinal}-{mesFinal}-{diaFinal}&adults={rooms}&property_type_id%5B%5D=1&property_type_id%5B%5D=43&property_type_id%5B%5D=47'\\", "dest_id, DayIni,DayFim) all_offset = parsed_html.find_all('li', {'class': 'sr_pagination_item'})[-1].get_text().splitlines()[-1] threads = []", "{'class': 'bui-link'}) is not None : # result = [str(item)", "diaFinal = str(int(DayFim[0:2])) mesFinal = str(int(DayFim[3:5])) anoFinal = str(int(DayFim[6:10])) '''", "d['Pais'] != ''] print('----------') print('Utilize uma das seguintes localizações') for", "the straight-line distance on the map. 
Actual travel distance may", "None: name = str(ho.find('span', {'class': 'sr-hotel__name'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else: name = '-1'", "price) def prep_data(rooms=1, country='Macedonia', dest_id='-1', DayIni='01/01/2019', DayFim='02/01/2019', out_format=None): ''' Prepare", ": distance = '-1' # if ho.find('a', {'class': 'bui-link'}) is", "them in', file, 'file') if __name__ == \"__main__\": parser =", "process_hotels) threads.append(t) for t in threads: t.start() for t in", "'lxml') return parsed_html def process_hotels(session, offset, rooms, country, dest_id, DayIni,", "not None: name = str(ho.find('span', {'class': 'sr-hotel__name'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else: name =", "''] print('----------') print('Utilize uma das seguintes localizações') for i in", "saved.') print('You can find them in', file, 'file') if __name__", "dest_id, DayIni, DayFim) diaInicial = str(int(DayIni[0:2])) mesInicial = str(int(DayIni[3:5])) anoInicial", "hotels = [] def get_countries(): with open(\"europa2020.txt\", \"r\") as f:", "'jq_tooltip'})['data-title'] print(\"ho.find('span', {'class': 'sr-hotel__name'})\") #print(ho.find('span', {'class': 'sr-hotel__name'})) if ho.find('span', {'class':", "# else: # distance = 'aaaaa' + str(len(result)) # else:", "[{ 'Pais': 'London', 'dest_id': '-2601889' }, { 'Pais': 'Utrecht', 'dest_id':", "{ 'Pais': '', 'dest_id': '' }] countryAux = [d['Pais'] for", "get_countries(): with open(\"europa2020.txt\", \"r\") as f: countries = f.read().splitlines() return", "them in file :return: hotels-in-macedonia.{txt/csv/xlsx} file ''' print('Procurando por',country) hotels_list", "\"__main__\": parser = argparse.ArgumentParser() countries = get_countries() parser.add_argument(\"--rooms\", help='Add the", "of rooms to the booking request.', default=1, type=int, nargs='?') parser.add_argument(\"--country\",", 
"{'class': 'jq_tooltip'})['data-title'] print(\"ho.find('span', {'class': 'sr-hotel__name'})\") #print(ho.find('span', {'class': 'sr-hotel__name'})) if ho.find('span',", "csv or excel :return: ''' writer = FileWriter(data, out_format, country)", "from bthread import BookingThread from bs4 import BeautifulSoup from file_writer", "= f.read().splitlines() return countries def get_booking_page(session, offset, rooms, country, dest_id,", "x64; rv:47.0)' ' Gecko/20100101 Firefox/48.0'}) html = r.content print(url) parsed_html", "# if ho.find('a', {'class': 'bui-link'}) is not None : #", "prep_data(rooms, country,dest_id, DayIni, DayFim, out_format) save_data(hotels_list , out_format=out_format, country=country) def", ":param out_format: json, csv or excel :return: ''' writer =", "';' + distance) #hotels.append(str(len(hotels) + 1) + ' : '", "= str(int(DayIni[0:2])) mesInicial = str(int(DayIni[3:5])) anoInicial = str(int(DayIni[6:10])) diaFinal =", "argcomplete.completers import ChoicesCompleter from argcomplete.completers import EnvironCompleter import requests from", "countries = get_countries() parser.add_argument(\"--rooms\", help='Add the number of rooms to", "ho.find('span', {'class': '_ky9opu0'}) is not None: nota = str(ho.find('span', {'class':", "rooms, country, dest_id, DayIni, DayFim): parsed_html = get_booking_page(session, offset, rooms,", "DayFim): print('get_booking_page(session, offset, rooms, country, dest_id, DayIni, DayFim):') print(session, offset,", "+ price + ';' + nota + ';' + distance)", "headers= {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0)' '", "+ nota + ';' + distance) #hotels.append(str(len(hotels) + 1) +", "Gecko/20100101 Firefox/48.0'}) html = r.content print(url) parsed_html = BeautifulSoup(html, 'lxml')", "+ distance) #hotels.append(str(len(hotels) + 1) + ' : ' +", "{'class': 'sr_item'}) for ho in hotel: #print(\"ho.find('a', {'class': 'jq_tooltip'})\") #print(ho.find('a',", "#name = ho.find('a', {'class': 
'jq_tooltip'})['data-title'] print(\"ho.find('span', {'class': 'sr-hotel__name'})\") #print(ho.find('span', {'class':", "with open(\"europa2020.txt\", \"r\") as f: countries = f.read().splitlines() return countries", "'sr-hotel__name'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else: name = '-1' if ho.find('div', {'class': 'bui-price-display__value prco-inline-block-maker-helper'})", "= 'aaaaa' + str(len(result)) # else: # distance = '---'", "':' + d['dest_id'] for d in localidades if d['Pais'] !=", "= [d['Pais'] for d in localidades if args.dest_id in d['dest_id']]", "country = 'Nao Identificado' locais = [d['Pais'] + ':' +", "{'class': 'sr-hotel__name'})\") #print(ho.find('span', {'class': 'sr-hotel__name'})) if ho.find('span', {'class': 'sr-hotel__name'}) is", "nota = str(ho.find('span', {'class': '_ky9opu0'}).text.replace('\\n','').replace(\"b\",\"\").replace(\"'\",\"\")) else : nota = '-1'", "= get_countries() parser.add_argument(\"--rooms\", help='Add the number of rooms to the", "dest_id,DayIni, DayFim) hotel = parsed_html.find_all('div', {'class': 'sr_item'}) for ho in", "html :param offset: :return: html page ''' url = 'https://www.airbnb.com.br/s/Londres/'\\", "r.content print(url) parsed_html = BeautifulSoup(html, 'lxml') return parsed_html def process_hotels(session,", "in hotel: #print(\"ho.find('a', {'class': 'jq_tooltip'})\") #print(ho.find('a', {'class': 'jq_tooltip'})) #name =", "localidades if d['Pais'] != ''] print('----------') print('Utilize uma das seguintes", "def get_booking_page(session, offset, rooms, country, dest_id, DayIni, DayFim): print('get_booking_page(session, offset,", "# distance = str(i) # else: # distance = '----'", "rooms, country, dest_id, DayIni, DayFim) diaInicial = str(int(DayIni[0:2])) mesInicial =", "all_offset = parsed_html.find_all('li', {'class': 'sr_pagination_item'})[-1].get_text().splitlines()[-1] threads = [] for i", "def save_data(data, out_format, 
country): ''' Saves hotels list in file", "{'class': 'sr-hotel__name'}).text.encode('utf-8')).replace('\\\\n','').replace(\"b\",\"\").replace(\"'\",\"\").replace('\\\\','') else: name = '-1' if ho.find('div', {'class': 'bui-price-display__value", "map. Actual travel distance may vary.'}) is not None: distance", "get_booking_page(session, offset, rooms, country, dest_id, DayIni,DayFim) all_offset = parsed_html.find_all('li', {'class':", "DayFim) diaInicial = str(int(DayIni[0:2])) mesInicial = str(int(DayIni[3:5])) anoInicial = str(int(DayIni[6:10]))", "country='Macedonia', dest_id='-1', DayIni='01/01/2019', DayFim='02/01/2019', out_format=None): ''' Prepare data for saving", "+ 1) + ' : ' + name + '", "hotels2 = hotels return hotels2 def get_data(rooms=1, country='Macedonia', dest_id='-1',DayIni='01/01/2019',DayFim='02/01/2019', out_format=None):", "Saves hotels list in file :param data: hotels list :param", "DayFim) hotel = parsed_html.find_all('div', {'class': 'sr_item'}) for ho in hotel:", "in d['dest_id']] if len(countryAux)>0: country = countryAux[0] print('Parametros') print(args.rooms, country,args.dest_id,args.DayIni,args.DayFim,", "offset, rooms, country, dest_id, DayIni, DayFim) diaInicial = str(int(DayIni[0:2])) mesInicial", "def process_hotels(session, offset, rooms, country, dest_id, DayIni, DayFim): parsed_html =", "# distance = result # else: # distance = 'aaaaa'", "None : # result = [str(item) for item in ho.find_all('span',", "requests.get(url, headers= {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0)'", "'&checkout={anoFinal}-{mesFinal}-{diaFinal}&adults={rooms}&property_type_id%5B%5D=1&property_type_id%5B%5D=43&property_type_id%5B%5D=47'\\ '&place_id=ChIJdd4hrwug2EcRmSrV3Vo6llI&room_types%5B%5D=Entire%20home%2Fapt'\\ '&section_offset=6&items_offset=18'.format(rooms=rooms, country=country.replace(' ', 
'+'),anoFinal=anoFinal,mesFinal=mesFinal,diaInicial=diaInicial,mesInicial=mesInicial,anoInicial=anoInicial,diaFinal=diaFinal,dest_id=dest_id) + str(offset) r =", "'-1' if ho.find('span', {'title': 'This is the straight-line distance on" ]
[ "# if user: # self.response.headers['Content-Type'] = 'text/plain' # self.response.write('Hello, '", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "# class GetUser(webapp2.RequestHandler): def get(self): self.response.out.write('<html><body>') client_id = self.request.get('client_id') ancestor_key", "eitthvad') for userid in userids: self.response.out.write('<blockquote>%s</blockquote>' % cgi.escape(userid.content)) # Checks", "for active Google account session # user = users.get_current_user() #", "HasData(webapp2.RequestHandler): def get(self): pass #TODO does user have data class", "# # Licensed under the Apache License, Version 2.0 (the", "compliance with the License. # You may obtain a copy", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "2.0 (the \"License\"); # you may not use this file", "agreed to in writing, software # distributed under the License", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "Unless required by applicable law or agreed to in writing,", "import ndb class UserId(ndb.Model): content = ndb.StringProperty() date = ndb.DateTimeProperty(auto_now_add=True)", "ndb class UserId(ndb.Model): content = ndb.StringProperty() date = ndb.DateTimeProperty(auto_now_add=True) @classmethod", "ancestor_key = ndb.Key(\"ID\", client_id or \"*no_id*\") userids = UserId.query_user(ancestor_key).fetch(20) self.response.out.write('her", "# user = users.get_current_user() # if user: # self.response.headers['Content-Type'] =", "import webapp2 # For datastore import cgi import urllib from", "google.appengine.ext import ndb class UserId(ndb.Model): content = ndb.StringProperty() date =", "ndb.Key(\"ID\", client_id or \"*no_id*\") userids = UserId.query_user(ancestor_key).fetch(20) self.response.out.write('her er eitthvad')", "distributed under the License is distributed on an \"AS IS\"", "************* # class 
HasData(webapp2.RequestHandler): def get(self): pass #TODO does user", "def query_user(cls, ancestor_key): return cls.query(ancestor=ancestor_key).order(-cls.date) # ************** MainHandler ************* #", "client_id or \"*no_id*\") userids = UserId.query_user(ancestor_key).fetch(20) self.response.out.write('her er eitthvad') for", "# property user.email() or user.user_id() app = webapp2.WSGIApplication([ ('/', MainHandler),", "For datastore import cgi import urllib from google.appengine.ext import ndb", "import urllib from google.appengine.ext import ndb class UserId(ndb.Model): content =", "cls.query(ancestor=ancestor_key).order(-cls.date) # ************** MainHandler ************* # class MainHandler(webapp2.RequestHandler): def get(self):", "user.nickname()) # else: # self.redirect(users.create_login_url(self.request.uri)) self.response.out.write('</body></html>') def post(self): pass #", "the specific language governing permissions and # limitations under the", "= 'text/plain' # self.response.write('Hello, ' + user.nickname()) # else: #", "def post(self): pass # ************** HasData ************* # class HasData(webapp2.RequestHandler):", "client class GetSyncData(object): \"\"\"docstring for GetSyncData\"\"\" def __init__(self, arg): super(GetSyncData,", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "def get(self): self.response.out.write('<html><body>') client_id = self.request.get('client_id') ancestor_key = ndb.Key(\"ID\", client_id", "express or implied. # See the License for the specific", "applicable law or agreed to in writing, software # distributed", "webapp2 # For datastore import cgi import urllib from google.appengine.ext", "except in compliance with the License. 
# You may obtain", "chrome_user.put() #TODO recieve data from client class GetSyncData(object): \"\"\"docstring for", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "users.get_current_user() # if user: # self.response.headers['Content-Type'] = 'text/plain' # self.response.write('Hello,", "if user: # self.response.headers['Content-Type'] = 'text/plain' # self.response.write('Hello, ' +", "# ************** HasData ************* # class HasData(webapp2.RequestHandler): def get(self): pass", "webapp2.WSGIApplication([ ('/', MainHandler), ('/GetUser/', GetUser), ('/HasData/', HasData), ('/chrome-sync/command/', PostData), ('/GetSyncData/',", "self.request.get('client_id') ancestor_key = ndb.Key(\"ID\", client_id or \"*no_id*\") userids = UserId.query_user(ancestor_key).fetch(20)", "def post(self): client_id = self.request.get('client_id') chrome_user = UserId(parent=ndb.Key(\"ID\", client_id or", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "************** HasData ************* # class HasData(webapp2.RequestHandler): def get(self): pass #TODO", "not use this file except in compliance with the License.", "# Copyright 2007 Google Inc. # # Licensed under the", "or user.user_id() app = webapp2.WSGIApplication([ ('/', MainHandler), ('/GetUser/', GetUser), ('/HasData/',", "class PostData(webapp2.RequestHandler): def post(self): client_id = self.request.get('client_id') chrome_user = UserId(parent=ndb.Key(\"ID\",", "return cls.query(ancestor=ancestor_key).order(-cls.date) # ************** MainHandler ************* # class MainHandler(webapp2.RequestHandler): def", "Copyright 2007 Google Inc. 
# # Licensed under the Apache", "@classmethod def query_user(cls, ancestor_key): return cls.query(ancestor=ancestor_key).order(-cls.date) # ************** MainHandler *************", "writing, software # distributed under the License is distributed on", "GetUser ************* # class GetUser(webapp2.RequestHandler): def get(self): self.response.out.write('<html><body>') client_id =", "pass # ************** HasData ************* # class HasData(webapp2.RequestHandler): def get(self):", "in writing, software # distributed under the License is distributed", "recieve data from client class GetSyncData(object): \"\"\"docstring for GetSyncData\"\"\" def", "# ************** MainHandler ************* # class MainHandler(webapp2.RequestHandler): def get(self): self.response.write('Hello", "#TODO does user have data class PostData(webapp2.RequestHandler): def post(self): client_id", "you may not use this file except in compliance with", "datastore import cgi import urllib from google.appengine.ext import ndb class", "= UserId.query_user(ancestor_key).fetch(20) self.response.out.write('her er eitthvad') for userid in userids: self.response.out.write('<blockquote>%s</blockquote>'", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "self.request.get('client_id')) chrome_user.put() #TODO recieve data from client class GetSyncData(object): \"\"\"docstring", "import users import webapp2 # For datastore import cgi import", "# limitations under the License. 
# from google.appengine.api import users", "world!') # ************** GetUser ************* # class GetUser(webapp2.RequestHandler): def get(self):", "userid in userids: self.response.out.write('<blockquote>%s</blockquote>' % cgi.escape(userid.content)) # Checks for active", "def __init__(self, arg): super(GetSyncData, self).__init__() self.arg = arg #implement get", "post(self): client_id = self.request.get('client_id') chrome_user = UserId(parent=ndb.Key(\"ID\", client_id or \"*no_id*\"),", "class GetUser(webapp2.RequestHandler): def get(self): self.response.out.write('<html><body>') client_id = self.request.get('client_id') ancestor_key =", "data class PostData(webapp2.RequestHandler): def post(self): client_id = self.request.get('client_id') chrome_user =", "users import webapp2 # For datastore import cgi import urllib", "\"*no_id*\"), content = self.request.get('client_id')) chrome_user.put() #TODO recieve data from client", "use this file except in compliance with the License. #", "= ndb.Key(\"ID\", client_id or \"*no_id*\") userids = UserId.query_user(ancestor_key).fetch(20) self.response.out.write('her er", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "get data for user # property user.email() or user.user_id() app", "MainHandler ************* # class MainHandler(webapp2.RequestHandler): def get(self): self.response.write('Hello world!') #", "Checks for active Google account session # user = users.get_current_user()", "content = self.request.get('client_id')) chrome_user.put() #TODO recieve data from client class", "python # # Copyright 2007 Google Inc. # # Licensed", "************* # class MainHandler(webapp2.RequestHandler): def get(self): self.response.write('Hello world!') # **************", "data from client class GetSyncData(object): \"\"\"docstring for GetSyncData\"\"\" def __init__(self,", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "#TODO recieve data from client class GetSyncData(object): \"\"\"docstring for GetSyncData\"\"\"", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "super(GetSyncData, self).__init__() self.arg = arg #implement get data for user", "client_id or \"*no_id*\"), content = self.request.get('client_id')) chrome_user.put() #TODO recieve data", "or implied. # See the License for the specific language", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "= ndb.DateTimeProperty(auto_now_add=True) @classmethod def query_user(cls, ancestor_key): return cls.query(ancestor=ancestor_key).order(-cls.date) # **************", "self).__init__() self.arg = arg #implement get data for user #", "License. # You may obtain a copy of the License", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "License, Version 2.0 (the \"License\"); # you may not use", "class HasData(webapp2.RequestHandler): def get(self): pass #TODO does user have data", "self.arg = arg #implement get data for user # property", "# You may obtain a copy of the License at", "date = ndb.DateTimeProperty(auto_now_add=True) @classmethod def query_user(cls, ancestor_key): return cls.query(ancestor=ancestor_key).order(-cls.date) #", "KIND, either express or implied. 
# See the License for", "specific language governing permissions and # limitations under the License.", "__init__(self, arg): super(GetSyncData, self).__init__() self.arg = arg #implement get data", "for GetSyncData\"\"\" def __init__(self, arg): super(GetSyncData, self).__init__() self.arg = arg", "cgi import urllib from google.appengine.ext import ndb class UserId(ndb.Model): content", "get(self): self.response.out.write('<html><body>') client_id = self.request.get('client_id') ancestor_key = ndb.Key(\"ID\", client_id or", "= users.get_current_user() # if user: # self.response.headers['Content-Type'] = 'text/plain' #", "under the License is distributed on an \"AS IS\" BASIS,", "self.response.write('Hello world!') # ************** GetUser ************* # class GetUser(webapp2.RequestHandler): def", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "ndb.StringProperty() date = ndb.DateTimeProperty(auto_now_add=True) @classmethod def query_user(cls, ancestor_key): return cls.query(ancestor=ancestor_key).order(-cls.date)", "License for the specific language governing permissions and # limitations", "google.appengine.api import users import webapp2 # For datastore import cgi", "else: # self.redirect(users.create_login_url(self.request.uri)) self.response.out.write('</body></html>') def post(self): pass # ************** HasData", "= arg #implement get data for user # property user.email()", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "post(self): pass # ************** HasData ************* # class HasData(webapp2.RequestHandler): def", "ancestor_key): return cls.query(ancestor=ancestor_key).order(-cls.date) # ************** MainHandler ************* # class MainHandler(webapp2.RequestHandler):", "class MainHandler(webapp2.RequestHandler): def get(self): self.response.write('Hello world!') # ************** GetUser *************", "Google Inc. 
# # Licensed under the Apache License, Version", "class UserId(ndb.Model): content = ndb.StringProperty() date = ndb.DateTimeProperty(auto_now_add=True) @classmethod def", "\"\"\"docstring for GetSyncData\"\"\" def __init__(self, arg): super(GetSyncData, self).__init__() self.arg =", "user.email() or user.user_id() app = webapp2.WSGIApplication([ ('/', MainHandler), ('/GetUser/', GetUser),", "user.user_id() app = webapp2.WSGIApplication([ ('/', MainHandler), ('/GetUser/', GetUser), ('/HasData/', HasData),", "UserId.query_user(ancestor_key).fetch(20) self.response.out.write('her er eitthvad') for userid in userids: self.response.out.write('<blockquote>%s</blockquote>' %", "does user have data class PostData(webapp2.RequestHandler): def post(self): client_id =", "from google.appengine.api import users import webapp2 # For datastore import", "MainHandler(webapp2.RequestHandler): def get(self): self.response.write('Hello world!') # ************** GetUser ************* #", "= UserId(parent=ndb.Key(\"ID\", client_id or \"*no_id*\"), content = self.request.get('client_id')) chrome_user.put() #TODO", "('/GetUser/', GetUser), ('/HasData/', HasData), ('/chrome-sync/command/', PostData), ('/GetSyncData/', GetSyncData) ], debug=True)", "the License for the specific language governing permissions and #", "def get(self): pass #TODO does user have data class PostData(webapp2.RequestHandler):", "(the \"License\"); # you may not use this file except", "Apache License, Version 2.0 (the \"License\"); # you may not", "client_id = self.request.get('client_id') ancestor_key = ndb.Key(\"ID\", client_id or \"*no_id*\") userids", "arg #implement get data for user # property user.email() or", "# you may not use this file except in compliance", "# class MainHandler(webapp2.RequestHandler): def get(self): self.response.write('Hello world!') # ************** GetUser", "self.response.out.write('her er eitthvad') for userid in userids: self.response.out.write('<blockquote>%s</blockquote>' % 
cgi.escape(userid.content))", "either express or implied. # See the License for the", "= ndb.StringProperty() date = ndb.DateTimeProperty(auto_now_add=True) @classmethod def query_user(cls, ancestor_key): return", "GetSyncData(object): \"\"\"docstring for GetSyncData\"\"\" def __init__(self, arg): super(GetSyncData, self).__init__() self.arg", "OR CONDITIONS OF ANY KIND, either express or implied. #", "= webapp2.WSGIApplication([ ('/', MainHandler), ('/GetUser/', GetUser), ('/HasData/', HasData), ('/chrome-sync/command/', PostData),", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "the License is distributed on an \"AS IS\" BASIS, #", "account session # user = users.get_current_user() # if user: #", "property user.email() or user.user_id() app = webapp2.WSGIApplication([ ('/', MainHandler), ('/GetUser/',", "in compliance with the License. # You may obtain a", "# self.response.headers['Content-Type'] = 'text/plain' # self.response.write('Hello, ' + user.nickname()) #", "# from google.appengine.api import users import webapp2 # For datastore", "governing permissions and # limitations under the License. 
# from", "software # distributed under the License is distributed on an", "= self.request.get('client_id') chrome_user = UserId(parent=ndb.Key(\"ID\", client_id or \"*no_id*\"), content =", "MainHandler), ('/GetUser/', GetUser), ('/HasData/', HasData), ('/chrome-sync/command/', PostData), ('/GetSyncData/', GetSyncData) ],", "# self.response.write('Hello, ' + user.nickname()) # else: # self.redirect(users.create_login_url(self.request.uri)) self.response.out.write('</body></html>')", "= self.request.get('client_id') ancestor_key = ndb.Key(\"ID\", client_id or \"*no_id*\") userids =", "def get(self): self.response.write('Hello world!') # ************** GetUser ************* # class", "self.response.write('Hello, ' + user.nickname()) # else: # self.redirect(users.create_login_url(self.request.uri)) self.response.out.write('</body></html>') def", "limitations under the License. # from google.appengine.api import users import", "data for user # property user.email() or user.user_id() app =", "HasData ************* # class HasData(webapp2.RequestHandler): def get(self): pass #TODO does", "# # Unless required by applicable law or agreed to", "in userids: self.response.out.write('<blockquote>%s</blockquote>' % cgi.escape(userid.content)) # Checks for active Google", "#implement get data for user # property user.email() or user.user_id()", "# For datastore import cgi import urllib from google.appengine.ext import", "# self.redirect(users.create_login_url(self.request.uri)) self.response.out.write('</body></html>') def post(self): pass # ************** HasData *************", "('/', MainHandler), ('/GetUser/', GetUser), ('/HasData/', HasData), ('/chrome-sync/command/', PostData), ('/GetSyncData/', GetSyncData)", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "user: # self.response.headers['Content-Type'] = 'text/plain' # self.response.write('Hello, ' + 
user.nickname())", "UserId(ndb.Model): content = ndb.StringProperty() date = ndb.DateTimeProperty(auto_now_add=True) @classmethod def query_user(cls,", "Version 2.0 (the \"License\"); # you may not use this", "# class HasData(webapp2.RequestHandler): def get(self): pass #TODO does user have", "PostData(webapp2.RequestHandler): def post(self): client_id = self.request.get('client_id') chrome_user = UserId(parent=ndb.Key(\"ID\", client_id", "'text/plain' # self.response.write('Hello, ' + user.nickname()) # else: # self.redirect(users.create_login_url(self.request.uri))", "% cgi.escape(userid.content)) # Checks for active Google account session #", "for userid in userids: self.response.out.write('<blockquote>%s</blockquote>' % cgi.escape(userid.content)) # Checks for", "law or agreed to in writing, software # distributed under", "self.response.out.write('<blockquote>%s</blockquote>' % cgi.escape(userid.content)) # Checks for active Google account session", "UserId(parent=ndb.Key(\"ID\", client_id or \"*no_id*\"), content = self.request.get('client_id')) chrome_user.put() #TODO recieve", "self.response.out.write('</body></html>') def post(self): pass # ************** HasData ************* # class", "under the License. # from google.appengine.api import users import webapp2", "app = webapp2.WSGIApplication([ ('/', MainHandler), ('/GetUser/', GetUser), ('/HasData/', HasData), ('/chrome-sync/command/',", "implied. 
# See the License for the specific language governing", "for user # property user.email() or user.user_id() app = webapp2.WSGIApplication([", "urllib from google.appengine.ext import ndb class UserId(ndb.Model): content = ndb.StringProperty()", "er eitthvad') for userid in userids: self.response.out.write('<blockquote>%s</blockquote>' % cgi.escape(userid.content)) #", "self.response.out.write('<html><body>') client_id = self.request.get('client_id') ancestor_key = ndb.Key(\"ID\", client_id or \"*no_id*\")", "under the Apache License, Version 2.0 (the \"License\"); # you", "ndb.DateTimeProperty(auto_now_add=True) @classmethod def query_user(cls, ancestor_key): return cls.query(ancestor=ancestor_key).order(-cls.date) # ************** MainHandler", "userids: self.response.out.write('<blockquote>%s</blockquote>' % cgi.escape(userid.content)) # Checks for active Google account", "\"License\"); # you may not use this file except in", "' + user.nickname()) # else: # self.redirect(users.create_login_url(self.request.uri)) self.response.out.write('</body></html>') def post(self):", "class GetSyncData(object): \"\"\"docstring for GetSyncData\"\"\" def __init__(self, arg): super(GetSyncData, self).__init__()", "get(self): self.response.write('Hello world!') # ************** GetUser ************* # class GetUser(webapp2.RequestHandler):", "session # user = users.get_current_user() # if user: # self.response.headers['Content-Type']", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "user = users.get_current_user() # if user: # self.response.headers['Content-Type'] = 'text/plain'", "pass #TODO does user have data class PostData(webapp2.RequestHandler): def post(self):", "self.redirect(users.create_login_url(self.request.uri)) self.response.out.write('</body></html>') def post(self): pass # ************** HasData ************* #", "permissions and # limitations under the License. 
# from google.appengine.api", "************** MainHandler ************* # class MainHandler(webapp2.RequestHandler): def get(self): self.response.write('Hello world!')", "active Google account session # user = users.get_current_user() # if", "GetUser(webapp2.RequestHandler): def get(self): self.response.out.write('<html><body>') client_id = self.request.get('client_id') ancestor_key = ndb.Key(\"ID\",", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "# Checks for active Google account session # user =", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "have data class PostData(webapp2.RequestHandler): def post(self): client_id = self.request.get('client_id') chrome_user", "user have data class PostData(webapp2.RequestHandler): def post(self): client_id = self.request.get('client_id')", "self.request.get('client_id') chrome_user = UserId(parent=ndb.Key(\"ID\", client_id or \"*no_id*\"), content = self.request.get('client_id'))", "GetSyncData\"\"\" def __init__(self, arg): super(GetSyncData, self).__init__() self.arg = arg #implement", "content = ndb.StringProperty() date = ndb.DateTimeProperty(auto_now_add=True) @classmethod def query_user(cls, ancestor_key):", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "import cgi import urllib from google.appengine.ext import ndb class UserId(ndb.Model):", "cgi.escape(userid.content)) # Checks for active Google account session # user", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "to in writing, software # distributed under the License is", "arg): super(GetSyncData, self).__init__() self.arg = arg #implement get data for", "the License. # from google.appengine.api import users import webapp2 #", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "Inc. # # Licensed under the Apache License, Version 2.0", "# See the License for the specific language governing permissions", "from google.appengine.ext import ndb class UserId(ndb.Model): content = ndb.StringProperty() date", "client_id = self.request.get('client_id') chrome_user = UserId(parent=ndb.Key(\"ID\", client_id or \"*no_id*\"), content", "or \"*no_id*\"), content = self.request.get('client_id')) chrome_user.put() #TODO recieve data from", "You may obtain a copy of the License at #", "self.response.headers['Content-Type'] = 'text/plain' # self.response.write('Hello, ' + user.nickname()) # else:", "+ user.nickname()) # else: # self.redirect(users.create_login_url(self.request.uri)) self.response.out.write('</body></html>') def post(self): pass", "# # Copyright 2007 Google Inc. # # Licensed under", "language governing permissions and # limitations under the License. #", "may not use this file except in compliance with the", "or agreed to in writing, software # distributed under the", "and # limitations under the License. 
# from google.appengine.api import", "chrome_user = UserId(parent=ndb.Key(\"ID\", client_id or \"*no_id*\"), content = self.request.get('client_id')) chrome_user.put()", "= self.request.get('client_id')) chrome_user.put() #TODO recieve data from client class GetSyncData(object):", "2007 Google Inc. # # Licensed under the Apache License,", "required by applicable law or agreed to in writing, software", "query_user(cls, ancestor_key): return cls.query(ancestor=ancestor_key).order(-cls.date) # ************** MainHandler ************* # class", "\"*no_id*\") userids = UserId.query_user(ancestor_key).fetch(20) self.response.out.write('her er eitthvad') for userid in", "************* # class GetUser(webapp2.RequestHandler): def get(self): self.response.out.write('<html><body>') client_id = self.request.get('client_id')", "userids = UserId.query_user(ancestor_key).fetch(20) self.response.out.write('her er eitthvad') for userid in userids:", "get(self): pass #TODO does user have data class PostData(webapp2.RequestHandler): def", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "License. # from google.appengine.api import users import webapp2 # For", "with the License. # You may obtain a copy of", "user # property user.email() or user.user_id() app = webapp2.WSGIApplication([ ('/',", "this file except in compliance with the License. # You", "# else: # self.redirect(users.create_login_url(self.request.uri)) self.response.out.write('</body></html>') def post(self): pass # **************", "the Apache License, Version 2.0 (the \"License\"); # you may", "or \"*no_id*\") userids = UserId.query_user(ancestor_key).fetch(20) self.response.out.write('her er eitthvad') for userid", "from client class GetSyncData(object): \"\"\"docstring for GetSyncData\"\"\" def __init__(self, arg):", "#!/usr/bin/env python # # Copyright 2007 Google Inc. 
# #", "# ************** GetUser ************* # class GetUser(webapp2.RequestHandler): def get(self): self.response.out.write('<html><body>')", "************** GetUser ************* # class GetUser(webapp2.RequestHandler): def get(self): self.response.out.write('<html><body>') client_id", "Google account session # user = users.get_current_user() # if user:" ]
[ "import ClientService from comet.protocol.subscriber import VOEventSubscriberFactory __all__ = [\"makeSubscriberService\"] def", "Handlers to which events which pass validation will be passed.", "events which pass validation will be passed. filters : `list`", "be passed. filters : `list` of `str` XPath filters. Will", "a request to filter the alerts being sent. Notes -----", "Comet VOEvent Broker. from twisted.application.internet import ClientService from comet.protocol.subscriber import", "`~comet.icomet.IHandler`. Handlers to which events which pass validation will be", "from twisted.application.internet import ClientService from comet.protocol.subscriber import VOEventSubscriberFactory __all__ =", "\"\"\"Create a reconnecting VOEvent subscriber service. Parameters ---------- endpoint :", "Upstream brokes may not provide support for XPath filtering; in", "the alerts being sent. Notes ----- Upstream brokes may not", "which will be applied to incoming events. Events which fail", "__all__ = [\"makeSubscriberService\"] def makeSubscriberService(endpoint, local_ivo, validators, handlers, filters): \"\"\"Create", "makeSubscriberService(endpoint, local_ivo, validators, handlers, filters): \"\"\"Create a reconnecting VOEvent subscriber", "`None` IVOA identifier for the subscriber. validators : `list` of", "`str` XPath filters. Will be passed to upstream as a", "support for XPath filtering; in this case, the filters suppplied", "default policies of `twisted.application.internet.ClientService`. \"\"\" factory = VOEventSubscriberFactory(local_ivo, validators, handlers,", "is handled according to the default policies of `twisted.application.internet.ClientService`. \"\"\"", "of `twisted.application.internet.ClientService`. \"\"\" factory = VOEventSubscriberFactory(local_ivo, validators, handlers, filters) service", "ignored. 
Reconnection is handled according to the default policies of", "provide support for XPath filtering; in this case, the filters", "`list` of `str` XPath filters. Will be passed to upstream", "Reconnection is handled according to the default policies of `twisted.application.internet.ClientService`.", "= VOEventSubscriberFactory(local_ivo, validators, handlers, filters) service = ClientService(endpoint, factory) return", "being sent. Notes ----- Upstream brokes may not provide support", "VOEventSubscriberFactory(local_ivo, validators, handlers, filters) service = ClientService(endpoint, factory) return service", "= [\"makeSubscriberService\"] def makeSubscriberService(endpoint, local_ivo, validators, handlers, filters): \"\"\"Create a", "be applied to incoming events. Events which fail validation will", "the service will connect. local_ivo : `str` or `None` IVOA", "as a request to filter the alerts being sent. Notes", "twisted.application.internet import ClientService from comet.protocol.subscriber import VOEventSubscriberFactory __all__ = [\"makeSubscriberService\"]", "handlers, filters): \"\"\"Create a reconnecting VOEvent subscriber service. Parameters ----------", "for the subscriber. validators : `list` of implementers of `~comet.icomet.IValidator`.", "implementers of `~comet.icomet.IValidator`. Validators which will be applied to incoming", "`list` of implementers of `~comet.icomet.IHandler`. Handlers to which events which", "filters : `list` of `str` XPath filters. Will be passed", ": `str` or `None` IVOA identifier for the subscriber. validators", "rejected. handlers : `list` of implementers of `~comet.icomet.IHandler`. Handlers to", "IVOA identifier for the subscriber. validators : `list` of implementers", "comet.protocol.subscriber import VOEventSubscriberFactory __all__ = [\"makeSubscriberService\"] def makeSubscriberService(endpoint, local_ivo, validators,", "which the service will connect. 
local_ivo : `str` or `None`", "handlers : `list` of implementers of `~comet.icomet.IHandler`. Handlers to which", "policies of `twisted.application.internet.ClientService`. \"\"\" factory = VOEventSubscriberFactory(local_ivo, validators, handlers, filters)", "brokes may not provide support for XPath filtering; in this", "alerts being sent. Notes ----- Upstream brokes may not provide", "`~comet.icomet.IValidator`. Validators which will be applied to incoming events. Events", "events. Events which fail validation will be rejected. handlers :", "local_ivo, validators, handlers, filters): \"\"\"Create a reconnecting VOEvent subscriber service.", "Events which fail validation will be rejected. handlers : `list`", "pass validation will be passed. filters : `list` of `str`", "will be rejected. handlers : `list` of implementers of `~comet.icomet.IHandler`.", "service will connect. local_ivo : `str` or `None` IVOA identifier", "endpoint to which the service will connect. local_ivo : `str`", "for XPath filtering; in this case, the filters suppplied will", "endpoint : implements `twisted.internet.interfaces.IStreamClientEndpoint` The endpoint to which the service", "case, the filters suppplied will be ignored. Reconnection is handled", "VOEvent subscriber service. Parameters ---------- endpoint : implements `twisted.internet.interfaces.IStreamClientEndpoint` The", "be passed to upstream as a request to filter the", "be rejected. handlers : `list` of implementers of `~comet.icomet.IHandler`. Handlers", "sent. Notes ----- Upstream brokes may not provide support for", "will be passed. filters : `list` of `str` XPath filters.", "of `str` XPath filters. Will be passed to upstream as", ": implements `twisted.internet.interfaces.IStreamClientEndpoint` The endpoint to which the service will", "will be ignored. Reconnection is handled according to the default", "The endpoint to which the service will connect. local_ivo :", "`str` or `None` IVOA identifier for the subscriber. 
validators :", "# Comet VOEvent Broker. from twisted.application.internet import ClientService from comet.protocol.subscriber", ": `list` of implementers of `~comet.icomet.IValidator`. Validators which will be", "passed. filters : `list` of `str` XPath filters. Will be", "Parameters ---------- endpoint : implements `twisted.internet.interfaces.IStreamClientEndpoint` The endpoint to which", "service. Parameters ---------- endpoint : implements `twisted.internet.interfaces.IStreamClientEndpoint` The endpoint to", "Validators which will be applied to incoming events. Events which", "validation will be rejected. handlers : `list` of implementers of", "validation will be passed. filters : `list` of `str` XPath", "will be applied to incoming events. Events which fail validation", "this case, the filters suppplied will be ignored. Reconnection is", "suppplied will be ignored. Reconnection is handled according to the", "VOEventSubscriberFactory __all__ = [\"makeSubscriberService\"] def makeSubscriberService(endpoint, local_ivo, validators, handlers, filters):", "to filter the alerts being sent. Notes ----- Upstream brokes", "def makeSubscriberService(endpoint, local_ivo, validators, handlers, filters): \"\"\"Create a reconnecting VOEvent", "filters): \"\"\"Create a reconnecting VOEvent subscriber service. Parameters ---------- endpoint", "to the default policies of `twisted.application.internet.ClientService`. \"\"\" factory = VOEventSubscriberFactory(local_ivo,", "Notes ----- Upstream brokes may not provide support for XPath", "---------- endpoint : implements `twisted.internet.interfaces.IStreamClientEndpoint` The endpoint to which the", "the filters suppplied will be ignored. Reconnection is handled according", "to which events which pass validation will be passed. filters", "to upstream as a request to filter the alerts being", "reconnecting VOEvent subscriber service. 
Parameters ---------- endpoint : implements `twisted.internet.interfaces.IStreamClientEndpoint`", "filters. Will be passed to upstream as a request to", "VOEvent Broker. from twisted.application.internet import ClientService from comet.protocol.subscriber import VOEventSubscriberFactory", "subscriber service. Parameters ---------- endpoint : implements `twisted.internet.interfaces.IStreamClientEndpoint` The endpoint", "factory = VOEventSubscriberFactory(local_ivo, validators, handlers, filters) service = ClientService(endpoint, factory)", "`list` of implementers of `~comet.icomet.IValidator`. Validators which will be applied", "not provide support for XPath filtering; in this case, the", ": `list` of `str` XPath filters. Will be passed to", "filter the alerts being sent. Notes ----- Upstream brokes may", "to incoming events. Events which fail validation will be rejected.", "XPath filtering; in this case, the filters suppplied will be", "in this case, the filters suppplied will be ignored. Reconnection", "a reconnecting VOEvent subscriber service. Parameters ---------- endpoint : implements", "which pass validation will be passed. filters : `list` of", "handled according to the default policies of `twisted.application.internet.ClientService`. \"\"\" factory", "import VOEventSubscriberFactory __all__ = [\"makeSubscriberService\"] def makeSubscriberService(endpoint, local_ivo, validators, handlers,", "upstream as a request to filter the alerts being sent.", "which fail validation will be rejected. handlers : `list` of", "fail validation will be rejected. handlers : `list` of implementers", "from comet.protocol.subscriber import VOEventSubscriberFactory __all__ = [\"makeSubscriberService\"] def makeSubscriberService(endpoint, local_ivo,", "connect. local_ivo : `str` or `None` IVOA identifier for the", "`twisted.application.internet.ClientService`. 
\"\"\" factory = VOEventSubscriberFactory(local_ivo, validators, handlers, filters) service =", "which events which pass validation will be passed. filters :", "passed to upstream as a request to filter the alerts", "----- Upstream brokes may not provide support for XPath filtering;", "to which the service will connect. local_ivo : `str` or", "of implementers of `~comet.icomet.IValidator`. Validators which will be applied to", "of `~comet.icomet.IValidator`. Validators which will be applied to incoming events.", "be ignored. Reconnection is handled according to the default policies", "request to filter the alerts being sent. Notes ----- Upstream", "filtering; in this case, the filters suppplied will be ignored.", "`twisted.internet.interfaces.IStreamClientEndpoint` The endpoint to which the service will connect. local_ivo", "of `~comet.icomet.IHandler`. Handlers to which events which pass validation will", "according to the default policies of `twisted.application.internet.ClientService`. \"\"\" factory =", "the default policies of `twisted.application.internet.ClientService`. \"\"\" factory = VOEventSubscriberFactory(local_ivo, validators,", "implements `twisted.internet.interfaces.IStreamClientEndpoint` The endpoint to which the service will connect.", "or `None` IVOA identifier for the subscriber. validators : `list`", "the subscriber. validators : `list` of implementers of `~comet.icomet.IValidator`. Validators", "applied to incoming events. Events which fail validation will be", "XPath filters. Will be passed to upstream as a request", ": `list` of implementers of `~comet.icomet.IHandler`. Handlers to which events", "subscriber. validators : `list` of implementers of `~comet.icomet.IValidator`. Validators which", "may not provide support for XPath filtering; in this case,", "Will be passed to upstream as a request to filter", "validators : `list` of implementers of `~comet.icomet.IValidator`. Validators which will", "identifier for the subscriber. 
validators : `list` of implementers of", "local_ivo : `str` or `None` IVOA identifier for the subscriber.", "of implementers of `~comet.icomet.IHandler`. Handlers to which events which pass", "Broker. from twisted.application.internet import ClientService from comet.protocol.subscriber import VOEventSubscriberFactory __all__", "[\"makeSubscriberService\"] def makeSubscriberService(endpoint, local_ivo, validators, handlers, filters): \"\"\"Create a reconnecting", "incoming events. Events which fail validation will be rejected. handlers", "ClientService from comet.protocol.subscriber import VOEventSubscriberFactory __all__ = [\"makeSubscriberService\"] def makeSubscriberService(endpoint,", "validators, handlers, filters): \"\"\"Create a reconnecting VOEvent subscriber service. Parameters", "will connect. local_ivo : `str` or `None` IVOA identifier for", "\"\"\" factory = VOEventSubscriberFactory(local_ivo, validators, handlers, filters) service = ClientService(endpoint,", "implementers of `~comet.icomet.IHandler`. Handlers to which events which pass validation", "filters suppplied will be ignored. Reconnection is handled according to" ]
[ "this is a valid 'etc' build. try: self.cbb.ValidateEtcBuild(bot) except ValueError", "the 'etc' builder if possible. A tryserver BuildBucket build takes", "not config_name: raise ValueError('Empty config name') if not self._ETC_TARGET_RE.match(config_name): raise", "name. properties = params.get('properties', {}) config_name = properties.get('cbb_config') if not", "self._GITILES_PATH_TMPL % { 'repo': poller.repo_path, 'revision': change.revision, 'path': change.files[0], }", "try job(s) %s for %s\" % (builder_name, ssid, bot)) dlist.append(self.addBuildsetForSourceStamp(ssid=ssid,", "sender_factory) @defer.inlineCallbacks def gotChange(self, change, important): try: yield self._gotChangeImpl(change, important)", "failed the validation step. This is most likely because <br>you", "m['Date'] = formatdate(localtime=True) m['Subject'] = 'Tryjob failed validation' m['From'] =", "% change.repository) # pylint: disable=W0631 file_contents = yield self.loadGitilesChangeFile(poller, change)", "configured.') if not config_name: raise ValueError('Empty config name') if not", "StringResponse from master import gitiles_poller from master.try_job_base import BadJobfile class", "spaces (to spill into extra args). _ETC_TARGET_RE = re.compile(r'^[a-zA-Z][\\w-]+\\w$') def", "error information:\") html_content.append(error.replace('\\n', '<br>\\n')) html_content.append(self.email_footer) m = Message() m.set_payload('<br><br>'.join(html_content), 'utf8')", "false positives. # pylint: disable=E0611,F0401 from email.Message import Message from", "props def create_buildset(self, ssid, parsed_job): \"\"\"Overriding base class method.\"\"\" dlist", "= parsed_job.get(name) if val is None: if required: error_msgs.append('Option %s", "again. If you still see<br>this message please contact <EMAIL>.<br> \"\"\"", "email_footer: The footer to append to any emails sent out.", "build's cbuildbot config target. - `extra_args` property (optional) may be", "the format: # (name, type, required). 
fields = [('name', basestring,", "property is not a list.') if not isinstance(properties.get('slaves_request', []), list):", "master's try job related configuration. configs (dict): A dictionary of", "% (config_name,)) def translate_v1_to_v2(parsed_job): \"\"\"Translate tryjob desc from V1 to", "have bots defined to execute. for bot in parsed_job['bot']: if", "extra_args: # This field can be quite large, and exceed", "_ETC_TARGET_RE = re.compile(r'^[a-zA-Z][\\w-]+\\w$') def __init__(self, configs, etc_builder=None): \"\"\"Holds base state", "This will be as up-to-date as the Chromite pin. etc_builder", "BadJobfile('\\n'.join(error_msgs)) def get_props(self, config, options): \"\"\"Overriding base class method.\"\"\" props", "on which this build may run. - Additional BuildBot properties", "Create a block to work around evil sys.modules manipulation in", "scheduler: %s' % (e,)) import traceback traceback.print_exc() @defer.inlineCallbacks def _gotChangeImpl(self,", "import formatdate except ImportError: raise from buildbot.process.properties import Properties from", "parse job JSON: %s\" % (e.message,)) def validate_job(self, parsed_job): #", "change) parsed = {} try: parsed = self.load_job(file_contents) self.validate_job(parsed) self.updateJobDesc(parsed)", "# Compress it, Base64 encode it, and prefix it with", "tryjob version to another. _TRANSLATION_FUNCS = { 1 : translate_v1_to_v2,", "import StringResponse from master import gitiles_poller from master.try_job_base import BadJobfile", "raise ValueError('`extra_args` property is not a list.') if not isinstance(properties.get('slaves_request',", "translate from one tryjob version to another. 
_TRANSLATION_FUNCS = {", "sys.modules manipulation in # email/__init__.py that triggers pylint false positives.", "parsed_job['version'] = 3 class CrOSTryJobGit(TryBase): \"\"\"Poll a Git server to", "dlist.append(self.addBuildsetForSourceStamp(ssid=ssid, reason=buildset_name, external_idstring=buildset_name, builderNames=[builder_name], properties=self.get_props(bot, parsed_job))) return defer.DeferredList(dlist) def send_validation_fail_email(self,", "self._PROPERTY_SOURCE) extra_args = options.get('extra_args') if extra_args: # This field can", ": translate_v2_to_v3, } # Template path URL component to retrieve", "log.msg('Exception in try job scheduler: %s' % (e,)) import traceback", "list, False), ('version', int, True), ('slaves_request', list, False), ] error_msgs", "tryjob desc from V2 to V3.\"\"\" # V3 --remote-patches format", "(e,)) import traceback traceback.print_exc() @defer.inlineCallbacks def _gotChangeImpl(self, change, _important): \"\"\"Process", "'etc' builder if possible. A tryserver BuildBucket build takes the", "('version', int, True), ('slaves_request', list, False), ] error_msgs = []", "from_addr self.reply_to = reply_to self.email_footer = email_footer self.cbb = cbuildbot_configs", "\"\"\" Your tryjob with name '%(name)s' failed the validation step.", "configuration. configs (dict): A dictionary of all known CrOS configs.", "in parsed_job.get('extra_args', ())): raise BadJobfile('Cannot translate --remote-patches from tryjob v.2", "self.etc_builder = etc_builder def AddBuildBucketHooks(self, c): \"\"\"Build mutation hook called", "!= 1: # We only accept changes with 1 diff", "V3.\"\"\" # V3 --remote-patches format is not backwards compatible. if", "the modified file. 
path = self._GITILES_PATH_TMPL % { 'repo': poller.repo_path,", "parsed_job['version'] <= prev_ver: raise AssertionError('translation function %s not incrementing version!'", "from email.Message import Message from email.Utils import formatdate except ImportError:", "(to spill into extra args). _ETC_TARGET_RE = re.compile(r'^[a-zA-Z][\\w-]+\\w$') def __init__(self,", "email.Utils import formatdate except ImportError: raise from buildbot.process.properties import Properties", "most likely because <br>you are running an older version of", "that this is a valid 'etc' build. try: self.cbb.ValidateEtcBuild(bot) except", "\"\"\"Translate tryjob desc from V1 to V2.\"\"\" parsed_job.setdefault('extra_args', []).append('--remote-trybot') parsed_job['version']", "cbuildbot config target. - `extra_args` property (optional) may be a", "manipulation in # email/__init__.py that triggers pylint false positives. #", "name, emails, error): \"\"\"Notify the user via email about the", "properties or {}) self.pollers = pollers self.smtp_host = smtp_host self.from_addr", "Message() m.set_payload('<br><br>'.join(html_content), 'utf8') m.set_type(\"text/html\") m['Date'] = formatdate(localtime=True) m['Subject'] = 'Tryjob", "reactor.connectTCP(self.smtp_host, 25, sender_factory) @defer.inlineCallbacks def gotChange(self, change, important): try: yield", "def get_props(self, config, options): \"\"\"Overriding base class method.\"\"\" props =", "Compress it, Base64 encode it, and prefix it with \"z:\"", "options.get('slaves_request', []), self._PROPERTY_SOURCE) props.setProperty('cbb_config', config, self._PROPERTY_SOURCE) extra_args = options.get('extra_args') if", "name doesn't contain spaces (to spill into extra args). 
_ETC_TARGET_RE", "('user', basestring, True), ('email', list, True), ('bot', list, True), ('extra_args',", "import zlib from StringIO import StringIO try: # Create a", "repo sync.') parsed_job['version'] = 3 class CrOSTryJobGit(TryBase): \"\"\"Poll a Git", "in # email/__init__.py that triggers pylint false positives. # pylint:", "if parsed_job['version'] <= prev_ver: raise AssertionError('translation function %s not incrementing", "list will go to an 'etc' builder, if available. properties:", "it will be mapped to the 'etc' builder if possible.", "utils from twisted.mail.smtp import SMTPSenderFactory from twisted.python import log from", "code paths converge. \"\"\" def params_hook(params, _build): # Map `cbb_config`", "patches to try.\"\"\" # Name of property source for generated", "wrong type!' % name) # If we're an 'etc' job,", "% { 'repo': poller.repo_path, 'revision': change.revision, 'path': change.files[0], } contents_b64", "the config name is unknown, it will be mapped to", "reactor, utils from twisted.mail.smtp import SMTPSenderFactory from twisted.python import log", "if val is None: if required: error_msgs.append('Option %s missing!' %", "mapping it based on its config. If an 'etc' builder", "smtp_host self.from_addr = from_addr self.reply_to = reply_to self.email_footer = email_footer", "1 diff file. raise BadJobfile( 'Try job with too many", "to a builder name. properties = params.get('properties', {}) config_name =", "execute. for bot in parsed_job['bot']: if bot in self.cbb.configs: continue", "type!' % name) # If we're an 'etc' job, we", "sourcestamp/buildsets created will be merge-able. 
ssid = yield self.master.db.sourcestamps.addSourceStamp( branch=change.branch,", "self.from_addr = from_addr self.reply_to = reply_to self.email_footer = email_footer self.cbb", "poller in self.pollers: if not isinstance(poller, gitiles_poller.GitilesPoller): continue if poller.repo_url", "pylint: disable=E0611,F0401 from email.Message import Message from email.Utils import formatdate", "path = self._GITILES_PATH_TMPL % { 'repo': poller.repo_path, 'revision': change.revision, 'path':", "dictionary of all known CrOS configs. This will be as", "# Name of property source for generated properties. _PROPERTY_SOURCE =", "changeids=[change.number]) yield self.create_buildset(ssid, parsed) @defer.inlineCallbacks def loadGitilesChangeFile(self, poller, change): if", "parsed = {} try: parsed = self.load_job(file_contents) self.validate_job(parsed) self.updateJobDesc(parsed) except", "try: yield self._gotChangeImpl(change, important) except Exception as e: log.msg('Exception in", "from V1 to V2.\"\"\" parsed_job.setdefault('extra_args', []).append('--remote-trybot') parsed_job['version'] = 2 def", "The email address to display as being sent from. reply_to:", "emails, StringIO(m.as_string()), result) reactor.connectTCP(self.smtp_host, 25, sender_factory) @defer.inlineCallbacks def gotChange(self, change,", "converted to BuildBot properties and referenced as such in other", "will be mapped to the 'etc' builder if possible. A", "emails sent out. cbuildbot_configs: (CbuildbotConfigs) A configuration set instance. Any", "= 3 class CrOSTryJobGit(TryBase): \"\"\"Poll a Git server to grab", "validation' m['From'] = self.from_addr m['Reply-To'] = self.reply_to result = defer.Deferred()", "limits. # Compress it, Base64 encode it, and prefix it", "result = defer.Deferred() sender_factory = SMTPSenderFactory(self.from_addr, emails, StringIO(m.as_string()), result) reactor.connectTCP(self.smtp_host,", "incrementing version!' 
% str(translation_func)) def __init__(self, name, pollers, smtp_host, from_addr,", "Base64 encode it, and prefix it with \"z:\" so the", "exceed BuildBot property limits. # Compress it, Base64 encode it,", "change.repository) # pylint: disable=W0631 file_contents = yield self.loadGitilesChangeFile(poller, change) parsed", "= [] buildset_name = '%s:%s' % (parsed_job['user'], parsed_job['name']) for bot", "as e: self.send_validation_fail_email(parsed.setdefault('name', ''), parsed['email'], str(e)) raise # The sourcestamp/buildsets", "be a JSON list of additional parameters to pass to", "repo git pit pollers. smtp_host: The smtp host for sending", "large, and exceed BuildBot property limits. # Compress it, Base64", "by including one or more BuildBucket `changes` parameters: [{'author': {'email':", "triggers pylint false positives. # pylint: disable=E0611,F0401 from email.Message import", "may be added. NOTE: Internally, all of these parameters are", "a file from # Gitiles. _GITILES_PATH_TMPL = '%(repo)s/+/%(revision)s/%(path)s?format=text' @classmethod def", "raise BadJobfile(\"Failed to parse job JSON: %s\" % (e.message,)) def", "Additional BuildBot properties may be added. NOTE: Internally, all of", "import Message from email.Utils import formatdate except ImportError: raise from", "CrOS configs. This will be as up-to-date as the Chromite", "<EMAIL>.<br> \"\"\" html_content.append(body % {'name': name}) html_content.append(\"Extra error information:\") html_content.append(error.replace('\\n',", "_TRYJOB_FORMAT_VERSION = 3 # Functions that translate from one tryjob", "= self.cbb.GetBuilderForConfig(bot) log.msg(\"Creating '%s' try job(s) %s for %s\" %", "run. - Additional BuildBot properties may be added. NOTE: Internally,", "an 'etc' builder, if available. 
properties: See TryBase.__init__() \"\"\" TryBase.__init__(self,", "yield self.create_buildset(ssid, parsed) @defer.inlineCallbacks def loadGitilesChangeFile(self, poller, change): if len(change.files)", "be dispatched. \"\"\" if not self.etc_builder: raise ValueError('etc builder is", "mutation hook called via BuildBucket when scheduling builds. The cbuildbot", "\"\"\"Holds base state of the master's try job related configuration.", "this source code is governed by a BSD-style license that", "list of job repo git pit pollers. smtp_host: The smtp", "True), ('email', list, True), ('bot', list, True), ('extra_args', list, False),", "# A list of field description tuples of the format:", "config target. - `extra_args` property (optional) may be a JSON", "v.2 to ' 'v.3. Please run repo sync.') parsed_job['version'] =", "parsed_job.get(name) if val is None: if required: error_msgs.append('Option %s missing!'", "name (%s).' % (config_name,)) def translate_v1_to_v2(parsed_job): \"\"\"Translate tryjob desc from", "config, options): \"\"\"Overriding base class method.\"\"\" props = Properties() props.setProperty('slaves_request',", "email.Message import Message from email.Utils import formatdate except ImportError: raise", "config name') if not self._ETC_TARGET_RE.match(config_name): raise ValueError('invalid etc config name", "] error_msgs = [] for name, f_type, required in fields:", "constructs the same property set, so code paths converge. \"\"\"", "name doesn't begin with a flag ('--') # - The", "V1 to V2.\"\"\" parsed_job.setdefault('extra_args', []).append('--remote-trybot') parsed_job['version'] = 2 def translate_v2_to_v3(parsed_job):", "code. The Git poller also constructs the same property set,", "parsed_job['name']) for bot in parsed_job['bot']: builder_name = self.cbb.GetBuilderForConfig(bot) log.msg(\"Creating '%s'", "# Copyright (c) 2012 The Chromium Authors. All rights reserved.", "ignored. 
- BuildBot changes can be added by including one", "of this source code is governed by a BSD-style license", "= self._GITILES_PATH_TMPL % { 'repo': poller.repo_path, 'revision': change.revision, 'path': change.files[0],", "method.\"\"\" props = Properties() props.setProperty('slaves_request', options.get('slaves_request', []), self._PROPERTY_SOURCE) props.setProperty('cbb_config', config,", "the build's cbuildbot config target. - `extra_args` property (optional) may", "False), ] error_msgs = [] for name, f_type, required in", "parsed_job): \"\"\"Overriding base class method.\"\"\" dlist = [] buildset_name =", "found in the LICENSE file. import base64 import json import", "reserved. # Use of this source code is governed by", "from buildbot.process.properties import Properties from buildbot.schedulers.trysched import TryBase from twisted.internet", "= \"\"\" Your tryjob with name '%(name)s' failed the validation", "fields = [('name', basestring, True), ('user', basestring, True), ('email', list,", "which this build may run. - Additional BuildBot properties may", "of the modified file. path = self._GITILES_PATH_TMPL % { 'repo':", "- `cbb_config` property must be set to the build's cbuildbot", "name') if not self._ETC_TARGET_RE.match(config_name): raise ValueError('invalid etc config name (%s).'", "in the 'Reply-To' email header field. email_footer: The footer to", "a Git server to grab patches to try.\"\"\" # Name", "import base64 import json import os import re import shutil", "from unsupported repository %s' % change.repository) # pylint: disable=W0631 file_contents", "JSON list of slaves on which this build may run.", "job repo git pit pollers. smtp_host: The smtp host for", "can be # found in the LICENSE file. import base64", "A list of job repo git pit pollers. smtp_host: The", "in other areas of code. The Git poller also constructs", "footer to append to any emails sent out. cbuildbot_configs: (CbuildbotConfigs)", "- Empty `builder_name` parameter. 
If one is supplied, it will", "sent out. cbuildbot_configs: (CbuildbotConfigs) A configuration set instance. Any 'bot'", "form: - Empty `builder_name` parameter. If one is supplied, it", "return config['_template'] or config_name self.ValidateEtcBuild(config_name) return self.etc_builder def ValidateEtcBuild(self, config_name):", "- BuildBot changes can be added by including one or", "areas of code. The Git poller also constructs the same", "tryjob desc from V1 to V2.\"\"\" parsed_job.setdefault('extra_args', []).append('--remote-trybot') parsed_job['version'] =", "basestring, True), ('email', list, True), ('bot', list, True), ('extra_args', list,", "tryjob error.\"\"\" html_content = [] html_content.append('<html><body>') body = \"\"\" Your", "'utf8') m.set_type(\"text/html\") m['Date'] = formatdate(localtime=True) m['Subject'] = 'Tryjob failed validation'", "these parameters are converted to BuildBot properties and referenced as", "desc from V1 to V2.\"\"\" parsed_job.setdefault('extra_args', []).append('--remote-trybot') parsed_job['version'] = 2", "class. Arguments: name: See TryBase.__init__(). pollers: A list of job", "error_msgs.append('Option %s missing!' % name) elif not isinstance(val, f_type): error_msgs.append('Option", "The version of tryjob that the master is expecting. _TRYJOB_FORMAT_VERSION", "base64 import json import os import re import shutil import", "of code. The Git poller also constructs the same property", "the name of the etc builder. \"\"\" self.configs = configs", "AssertionError('translation function %s not incrementing version!' % str(translation_func)) def __init__(self,", "build may run. 
- Additional BuildBot properties may be added.", "are converted to BuildBot properties and referenced as such in", "create_buildset(self, ssid, parsed_job): \"\"\"Overriding base class method.\"\"\" dlist = []", "'revision': change.revision, 'path': change.files[0], } contents_b64 = yield poller.agent.request('GET', path,", "--remote-patches from tryjob v.2 to ' 'v.3. Please run repo", "via BuildBucket when scheduling builds. The cbuildbot config is specified", "contents of a file from # Gitiles. _GITILES_PATH_TMPL = '%(repo)s/+/%(revision)s/%(path)s?format=text'", "# (name, type, required). fields = [('name', basestring, True), ('user',", "try: self.cbb.ValidateEtcBuild(bot) except ValueError as e: error_msgs.append(\"Invalid 'etc' build (%s):", "config name (%s).' % (config_name,)) def translate_v1_to_v2(parsed_job): \"\"\"Translate tryjob desc", "pylint false positives. # pylint: disable=E0611,F0401 from email.Message import Message", "raise ValueError('etc builder is not configured.') if not config_name: raise", "import BadJobfile class CbuildbotConfigs(object): # Valid 'etc' builder targets. Specifically,", "of job repo git pit pollers. smtp_host: The smtp host", "def gotChange(self, change, important): try: yield self._gotChangeImpl(change, important) except Exception", "reason=buildset_name, external_idstring=buildset_name, builderNames=[builder_name], properties=self.get_props(bot, parsed_job))) return defer.DeferredList(dlist) def send_validation_fail_email(self, name,", "_important): \"\"\"Process the received data and send the queue buildset.\"\"\"", "knows its size. extra_args = 'z:' + base64.b64encode(zlib.compress(json.dumps( extra_args))) props.setProperty('cbb_extra_args',", "False), ('version', int, True), ('slaves_request', list, False), ] error_msgs =", "defer.DeferredList(dlist) def send_validation_fail_email(self, name, emails, error): \"\"\"Notify the user via", "to the tryjob. 
- `slaves_request` property (optional) may be a", "job with too many files %s' % (','.join(change.files))) # Load", "self.from_addr m['Reply-To'] = self.reply_to result = defer.Deferred() sender_factory = SMTPSenderFactory(self.from_addr,", "_GITILES_PATH_TMPL = '%(repo)s/+/%(revision)s/%(path)s?format=text' @classmethod def updateJobDesc(cls, parsed_job): \"\"\"Ensure job description", "ValueError('etc builder is not configured.') if not config_name: raise ValueError('Empty", "# - The build name doesn't contain spaces (to spill", "# Add mandatory properties to build. params['properties'] = properties c['buildbucket_params_hook']", "BadJobfile( 'Try job with too many files %s' % (','.join(change.files)))", "@defer.inlineCallbacks def loadGitilesChangeFile(self, poller, change): if len(change.files) != 1: #", "ssid = yield self.master.db.sourcestamps.addSourceStamp( branch=change.branch, revision=change.revision, project=change.project, repository=change.repository, changeids=[change.number]) yield", "name, [], properties or {}) self.pollers = pollers self.smtp_host =", "from one tryjob version to another. _TRANSLATION_FUNCS = { 1", "The build name doesn't begin with a flag ('--') #", "raise BadJobfile('Cannot translate --remote-patches from tryjob v.2 to ' 'v.3.", "\"\"\"Process the received data and send the queue buildset.\"\"\" #", "function %s not incrementing version!' % str(translation_func)) def __init__(self, name,", "extra_args))) props.setProperty('cbb_extra_args', extra_args, self._PROPERTY_SOURCE) return props def create_buildset(self, ssid, parsed_job):", "self._ETC_TARGET_RE.match(config_name): raise ValueError('invalid etc config name (%s).' 
% (config_name,)) def", "NOTE: Internally, all of these parameters are converted to BuildBot", "waterfall builder name by mapping it based on its config.", "Properties() props.setProperty('slaves_request', options.get('slaves_request', []), self._PROPERTY_SOURCE) props.setProperty('cbb_config', config, self._PROPERTY_SOURCE) extra_args =", "tryjob. - `slaves_request` property (optional) may be a JSON list", "[]), list): raise ValueError('`slaves_request` is not a list.') # Add", "pollers: A list of job repo git pit pollers. smtp_host:", "Base64 contents of a file from # Gitiles. _GITILES_PATH_TMPL =", "etc_builder def AddBuildBucketHooks(self, c): \"\"\"Build mutation hook called via BuildBucket", "master.try_job_base import BadJobfile class CbuildbotConfigs(object): # Valid 'etc' builder targets.", "not self._ETC_TARGET_RE.match(config_name): raise ValueError('invalid etc config name (%s).' % (config_name,))", "updateJobDesc(cls, parsed_job): \"\"\"Ensure job description is in the format we", "a specified build config_name is candidate for etc build. Raises", "smtp_host, from_addr, reply_to, email_footer, cbuildbot_configs, properties=None): \"\"\"Initialize the class. Arguments:", "<br>you are running an older version of cbuildbot. Please run", "= self.configs.get(config_name) if config: return config['_template'] or config_name self.ValidateEtcBuild(config_name) return", "including one or more BuildBucket `changes` parameters: [{'author': {'email': '<EMAIL>'}}].", "The Git poller also constructs the same property set, so", "'etc' job, we must have bots defined to execute. 
for", "Raises a ValueError if an etc build cannot be dispatched.", "translate_v1_to_v2, 2 : translate_v2_to_v3, } # Template path URL component", "poller also constructs the same property set, so code paths", "(config_name,)) def translate_v1_to_v2(parsed_job): \"\"\"Translate tryjob desc from V1 to V2.\"\"\"", "Template path URL component to retrieve the Base64 contents of", "config name is unknown, it will be mapped to the", "build name doesn't contain spaces (to spill into extra args).", "(builder_name, ssid, bot)) dlist.append(self.addBuildsetForSourceStamp(ssid=ssid, reason=buildset_name, external_idstring=buildset_name, builderNames=[builder_name], properties=self.get_props(bot, parsed_job))) return", "= from_addr self.reply_to = reply_to self.email_footer = email_footer self.cbb =", "work around evil sys.modules manipulation in # email/__init__.py that triggers", "import TryBase from twisted.internet import defer, reactor, utils from twisted.mail.smtp", "yield self.loadGitilesChangeFile(poller, change) parsed = {} try: parsed = self.load_job(file_contents)", "except ImportError: raise from buildbot.process.properties import Properties from buildbot.schedulers.trysched import", "(str): If not None, the name of the etc builder.", "parameters to pass to the tryjob. - `slaves_request` property (optional)", "for etc build. Raises a ValueError if an etc build", "is configured and the config name is unknown, it will", "= yield self.loadGitilesChangeFile(poller, change) parsed = {} try: parsed =", "sync.') parsed_job['version'] = 3 class CrOSTryJobGit(TryBase): \"\"\"Poll a Git server", "from common.twisted_util.response import StringResponse from master import gitiles_poller from master.try_job_base", "JSON list of additional parameters to pass to the tryjob.", "the same property set, so code paths converge. \"\"\" def", "prev_ver: raise AssertionError('translation function %s not incrementing version!' 
% str(translation_func))", "StringIO try: # Create a block to work around evil", "config_name = properties.get('cbb_config') if not config_name: raise ValueError('Missing required `cbb_config`", "= 2 def translate_v2_to_v3(parsed_job): \"\"\"Translate tryjob desc from V2 to", "that translate from one tryjob version to another. _TRANSLATION_FUNCS =", "known CrOS configs. This will be as up-to-date as the", "from_addr: The email address to display as being sent from.", "and try again. If you still see<br>this message please contact", "TryBase.__init__(). pollers: A list of job repo git pit pollers.", "ValueError('invalid etc config name (%s).' % (config_name,)) def translate_v1_to_v2(parsed_job): \"\"\"Translate", "name '%(name)s' failed the validation step. This is most likely", "import Properties from buildbot.schedulers.trysched import TryBase from twisted.internet import defer,", "basestring, True), ('user', basestring, True), ('email', list, True), ('bot', list,", "of the format: # (name, type, required). fields = [('name',", "import log from common.twisted_util.response import StringResponse from master import gitiles_poller", "continue # Assert that this is a valid 'etc' build.", "m.set_payload('<br><br>'.join(html_content), 'utf8') m.set_type(\"text/html\") m['Date'] = formatdate(localtime=True) m['Subject'] = 'Tryjob failed", "request outside of this list will go to an 'etc'", "log from common.twisted_util.response import StringResponse from master import gitiles_poller from", "being sent from. reply_to: The email address to put in", "build (%s): %s\" % (bot, e.message)) if error_msgs: raise BadJobfile('\\n'.join(error_msgs))", "git pit pollers. smtp_host: The smtp host for sending out", "for a in parsed_job.get('extra_args', ())): raise BadJobfile('Cannot translate --remote-patches from", "to ' 'v.3. 
Please run repo sync.') parsed_job['version'] = 3", "('extra_args', list, False), ('version', int, True), ('slaves_request', list, False), ]", "older version of cbuildbot. Please run <br><code>repo sync chromiumos/chromite</code> and", "TryBase from twisted.internet import defer, reactor, utils from twisted.mail.smtp import", "Any 'bot' request outside of this list will go to", "= Properties() props.setProperty('slaves_request', options.get('slaves_request', []), self._PROPERTY_SOURCE) props.setProperty('cbb_config', config, self._PROPERTY_SOURCE) extra_args", "a list.') # Add mandatory properties to build. params['properties'] =", "will be ignored. - BuildBot changes can be added by", "`cbb_config` property.') params['builder_name'] = self.GetBuilderForConfig(config_name) # Validate other fields. if", "is governed by a BSD-style license that can be #", "import re import shutil import zlib from StringIO import StringIO", "if not config_name: raise ValueError('Empty config name') if not self._ETC_TARGET_RE.match(config_name):", "block to work around evil sys.modules manipulation in # email/__init__.py", "missing!' % name) elif not isinstance(val, f_type): error_msgs.append('Option %s of", "required `cbb_config` property.') params['builder_name'] = self.GetBuilderForConfig(config_name) # Validate other fields.", "yield self.master.db.sourcestamps.addSourceStamp( branch=change.branch, revision=change.revision, project=change.project, repository=change.repository, changeids=[change.number]) yield self.create_buildset(ssid, parsed)", "3 # Functions that translate from one tryjob version to", "one is supplied, it will be ignored. - BuildBot changes", "bot in parsed_job['bot']: if bot in self.cbb.configs: continue # Assert", "append to any emails sent out. 
cbuildbot_configs: (CbuildbotConfigs) A configuration", "self.reply_to result = defer.Deferred() sender_factory = SMTPSenderFactory(self.from_addr, emails, StringIO(m.as_string()), result)", "job scheduler: %s' % (e,)) import traceback traceback.print_exc() @defer.inlineCallbacks def", "# Validate other fields. if not isinstance(properties.get('extra_args', []), list): raise", "Use of this source code is governed by a BSD-style", "True), ('bot', list, True), ('extra_args', list, False), ('version', int, True),", "= self.from_addr m['Reply-To'] = self.reply_to result = defer.Deferred() sender_factory =", "from buildbot.schedulers.trysched import TryBase from twisted.internet import defer, reactor, utils", "a builder name. properties = params.get('properties', {}) config_name = properties.get('cbb_config')", "branch=change.branch, revision=change.revision, project=change.project, repository=change.repository, changeids=[change.number]) yield self.create_buildset(ssid, parsed) @defer.inlineCallbacks def", "poller that this change came from. for poller in self.pollers:", "name) elif not isinstance(val, f_type): error_msgs.append('Option %s of wrong type!'", "self.loadGitilesChangeFile(poller, change) parsed = {} try: parsed = self.load_job(file_contents) self.validate_job(parsed)", "The cbuildbot config is specified in the `cbb_config` property. The", "% (bot, e.message)) if error_msgs: raise BadJobfile('\\n'.join(error_msgs)) def get_props(self, config,", "('slaves_request', list, False), ] error_msgs = [] for name, f_type,", "server to grab patches to try.\"\"\" # Name of property", "Arguments: name: See TryBase.__init__(). pollers: A list of job repo", "pollers, smtp_host, from_addr, reply_to, email_footer, cbuildbot_configs, properties=None): \"\"\"Initialize the class.", "list.') # Add mandatory properties to build. 
params['properties'] = properties", "'Received tryjob from unsupported repository %s' % change.repository) # pylint:", "break else: raise BadJobfile( 'Received tryjob from unsupported repository %s'", "the received data and send the queue buildset.\"\"\" # Find", "merge-able. ssid = yield self.master.db.sourcestamps.addSourceStamp( branch=change.branch, revision=change.revision, project=change.project, repository=change.repository, changeids=[change.number])", "class method.\"\"\" props = Properties() props.setProperty('slaves_request', options.get('slaves_request', []), self._PROPERTY_SOURCE) props.setProperty('cbb_config',", "Specifically, this ensures: # - The build name doesn't begin", "with 1 diff file. raise BadJobfile( 'Try job with too", "% (','.join(change.files))) # Load the contents of the modified file.", "may be a JSON list of slaves on which this", "rights reserved. # Use of this source code is governed", "property (optional) may be a JSON list of slaves on", "pit pollers. smtp_host: The smtp host for sending out error", "Assert that this is a valid 'etc' build. try: self.cbb.ValidateEtcBuild(bot)", "_build): # Map `cbb_config` to a builder name. properties =", "of all known CrOS configs. This will be as up-to-date", "be merge-able. ssid = yield self.master.db.sourcestamps.addSourceStamp( branch=change.branch, revision=change.revision, project=change.project, repository=change.repository,", "and the config name is unknown, it will be mapped", "error_msgs = [] for name, f_type, required in fields: val", "not isinstance(properties.get('slaves_request', []), list): raise ValueError('`slaves_request` is not a list.')", "name, f_type, required in fields: val = parsed_job.get(name) if val", "= {} try: parsed = self.load_job(file_contents) self.validate_job(parsed) self.updateJobDesc(parsed) except BadJobfile", "%s of wrong type!' 
% name) # If we're an", "def _gotChangeImpl(self, change, _important): \"\"\"Process the received data and send", "translate_v1_to_v2(parsed_job): \"\"\"Translate tryjob desc from V1 to V2.\"\"\" parsed_job.setdefault('extra_args', []).append('--remote-trybot')", "created will be merge-able. ssid = yield self.master.db.sourcestamps.addSourceStamp( branch=change.branch, revision=change.revision,", "if poller.repo_url == change.repository: break else: raise BadJobfile( 'Received tryjob", "json.loads(data) except ValueError as e: raise BadJobfile(\"Failed to parse job", "to another. _TRANSLATION_FUNCS = { 1 : translate_v1_to_v2, 2 :", "email header field. email_footer: The footer to append to any", "BuildBucket when scheduling builds. The cbuildbot config is specified in", "self.pollers = pollers self.smtp_host = smtp_host self.from_addr = from_addr self.reply_to", "def params_hook(params, _build): # Map `cbb_config` to a builder name.", "= '%s:%s' % (parsed_job['user'], parsed_job['name']) for bot in parsed_job['bot']: builder_name", "only accept changes with 1 diff file. raise BadJobfile( 'Try", "email address to display as being sent from. reply_to: The", "<= prev_ver: raise AssertionError('translation function %s not incrementing version!' %", "received data and send the queue buildset.\"\"\" # Find poller", "parameters: [{'author': {'email': '<EMAIL>'}}]. - `cbb_config` property must be set", "based on its config. If an 'etc' builder is configured", "# Gitiles. _GITILES_PATH_TMPL = '%(repo)s/+/%(revision)s/%(path)s?format=text' @classmethod def updateJobDesc(cls, parsed_job): \"\"\"Ensure", "license that can be # found in the LICENSE file.", "format is not backwards compatible. 
if any(a.startswith('--remote-patches') for a in", "(bot, e.message)) if error_msgs: raise BadJobfile('\\n'.join(error_msgs)) def get_props(self, config, options):", "change, _important): \"\"\"Process the received data and send the queue", "extra_args = options.get('extra_args') if extra_args: # This field can be", "# Use of this source code is governed by a", "parsed_job['bot']: builder_name = self.cbb.GetBuilderForConfig(bot) log.msg(\"Creating '%s' try job(s) %s for", "if not self.etc_builder: raise ValueError('etc builder is not configured.') if", "BuildBot properties may be added. NOTE: Internally, all of these", "to build. params['properties'] = properties c['buildbucket_params_hook'] = params_hook def GetBuilderForConfig(self,", "+ base64.b64encode(zlib.compress(json.dumps( extra_args))) props.setProperty('cbb_extra_args', extra_args, self._PROPERTY_SOURCE) return props def create_buildset(self,", "not self.etc_builder: raise ValueError('etc builder is not configured.') if not", "email/__init__.py that triggers pylint false positives. # pylint: disable=E0611,F0401 from", "get_props(self, config, options): \"\"\"Overriding base class method.\"\"\" props = Properties()", "to display as being sent from. reply_to: The email address", "(c) 2012 The Chromium Authors. All rights reserved. # Use", "validate_job(self, parsed_job): # A list of field description tuples of", "to put in the 'Reply-To' email header field. email_footer: The", "_PROPERTY_SOURCE = 'Try Job' # The version of tryjob that", "it, and prefix it with \"z:\" so the consumer #", "a BSD-style license that can be # found in the", "'etc' builder, if available. properties: See TryBase.__init__() \"\"\" TryBase.__init__(self, name,", "Your tryjob with name '%(name)s' failed the validation step. This", "required). 
fields = [('name', basestring, True), ('user', basestring, True), ('email',", "str(translation_func)) def __init__(self, name, pollers, smtp_host, from_addr, reply_to, email_footer, cbuildbot_configs,", "[{'author': {'email': '<EMAIL>'}}]. - `cbb_config` property must be set to", "you still see<br>this message please contact <EMAIL>.<br> \"\"\" html_content.append(body %", "to the 'etc' builder if possible. A tryserver BuildBucket build", "properties=self.get_props(bot, parsed_job))) return defer.DeferredList(dlist) def send_validation_fail_email(self, name, emails, error): \"\"\"Notify", "A configuration set instance. Any 'bot' request outside of this", "= params_hook def GetBuilderForConfig(self, config_name): config = self.configs.get(config_name) if config:", "\"z:\" so the consumer # knows its size. extra_args =", "Please run repo sync.') parsed_job['version'] = 3 class CrOSTryJobGit(TryBase): \"\"\"Poll", "from master import gitiles_poller from master.try_job_base import BadJobfile class CbuildbotConfigs(object):", "self.smtp_host = smtp_host self.from_addr = from_addr self.reply_to = reply_to self.email_footer", "to an 'etc' builder, if available. 
properties: See TryBase.__init__() \"\"\"", "[]), list): raise ValueError('`extra_args` property is not a list.') if", "e: error_msgs.append(\"Invalid 'etc' build (%s): %s\" % (bot, e.message)) if", "options): \"\"\"Overriding base class method.\"\"\" props = Properties() props.setProperty('slaves_request', options.get('slaves_request',", "email_footer self.cbb = cbuildbot_configs def startService(self): TryBase.startService(self) self.startConsumingChanges() @staticmethod def", "is most likely because <br>you are running an older version", "gitiles_poller.GitilesPoller): continue if poller.repo_url == change.repository: break else: raise BadJobfile(", "a JSON list of slaves on which this build may", "configs self.etc_builder = etc_builder def AddBuildBucketHooks(self, c): \"\"\"Build mutation hook", "as up-to-date as the Chromite pin. etc_builder (str): If not", "True), ('extra_args', list, False), ('version', int, True), ('slaves_request', list, False),", "description is in the format we expect.\"\"\" while parsed_job['version'] <", "@defer.inlineCallbacks def gotChange(self, change, important): try: yield self._gotChangeImpl(change, important) except", "isinstance(properties.get('extra_args', []), list): raise ValueError('`extra_args` property is not a list.')", "import defer, reactor, utils from twisted.mail.smtp import SMTPSenderFactory from twisted.python", "set instance. Any 'bot' request outside of this list will", "from. for poller in self.pollers: if not isinstance(poller, gitiles_poller.GitilesPoller): continue", "encode it, and prefix it with \"z:\" so the consumer", "from twisted.internet import defer, reactor, utils from twisted.mail.smtp import SMTPSenderFactory", "twisted.mail.smtp import SMTPSenderFactory from twisted.python import log from common.twisted_util.response import", "ValueError('Missing required `cbb_config` property.') params['builder_name'] = self.GetBuilderForConfig(config_name) # Validate other", "diff file. 
raise BadJobfile( 'Try job with too many files", "if not isinstance(properties.get('extra_args', []), list): raise ValueError('`extra_args` property is not", "and send the queue buildset.\"\"\" # Find poller that this", "'%s' try job(s) %s for %s\" % (builder_name, ssid, bot))", "source code is governed by a BSD-style license that can", "changes with 1 diff file. raise BadJobfile( 'Try job with", "must have bots defined to execute. for bot in parsed_job['bot']:", "= cbuildbot_configs def startService(self): TryBase.startService(self) self.startConsumingChanges() @staticmethod def load_job(data): try:", "sync chromiumos/chromite</code> and try again. If you still see<br>this message", "tuples of the format: # (name, type, required). fields =", "html_content.append(error.replace('\\n', '<br>\\n')) html_content.append(self.email_footer) m = Message() m.set_payload('<br><br>'.join(html_content), 'utf8') m.set_type(\"text/html\") m['Date']", "field description tuples of the format: # (name, type, required).", "cls._TRYJOB_FORMAT_VERSION: prev_ver = parsed_job['version'] translation_func = cls._TRANSLATION_FUNCS[parsed_job['version']] translation_func(parsed_job) if parsed_job['version']", "job related configuration. configs (dict): A dictionary of all known", "except ValueError as e: raise BadJobfile(\"Failed to parse job JSON:", "an 'etc' builder is configured and the config name is", "# email/__init__.py that triggers pylint false positives. 
# pylint: disable=E0611,F0401", "to BuildBot properties and referenced as such in other areas", "def validate_job(self, parsed_job): # A list of field description tuples", "% {'name': name}) html_content.append(\"Extra error information:\") html_content.append(error.replace('\\n', '<br>\\n')) html_content.append(self.email_footer) m", "def AddBuildBucketHooks(self, c): \"\"\"Build mutation hook called via BuildBucket when", "config_name: raise ValueError('Empty config name') if not self._ETC_TARGET_RE.match(config_name): raise ValueError('invalid", "is specified in the `cbb_config` property. The callback transforms that", "targets. Specifically, this ensures: # - The build name doesn't", "reply_to: The email address to put in the 'Reply-To' email", "%s for %s\" % (builder_name, ssid, bot)) dlist.append(self.addBuildsetForSourceStamp(ssid=ssid, reason=buildset_name, external_idstring=buildset_name,", "self.etc_builder def ValidateEtcBuild(self, config_name): \"\"\"Tests whether a specified build config_name", "@defer.inlineCallbacks def _gotChangeImpl(self, change, _important): \"\"\"Process the received data and", "= configs self.etc_builder = etc_builder def AddBuildBucketHooks(self, c): \"\"\"Build mutation", "as e: error_msgs.append(\"Invalid 'etc' build (%s): %s\" % (bot, e.message))", "config_name: raise ValueError('Missing required `cbb_config` property.') params['builder_name'] = self.GetBuilderForConfig(config_name) #", "[('name', basestring, True), ('user', basestring, True), ('email', list, True), ('bot',", "The callback transforms that property to an actual waterfall builder", "to try.\"\"\" # Name of property source for generated properties.", "\"\"\" if not self.etc_builder: raise ValueError('etc builder is not configured.')", "build takes the form: - Empty `builder_name` parameter. If one", "%s not incrementing version!' % str(translation_func)) def __init__(self, name, pollers,", "if possible. A tryserver BuildBucket build takes the form: -", "size. 
extra_args = 'z:' + base64.b64encode(zlib.compress(json.dumps( extra_args))) props.setProperty('cbb_extra_args', extra_args, self._PROPERTY_SOURCE)", "properties and referenced as such in other areas of code.", "If an 'etc' builder is configured and the config name", "= formatdate(localtime=True) m['Subject'] = 'Tryjob failed validation' m['From'] = self.from_addr", "chromiumos/chromite</code> and try again. If you still see<br>this message please", "changes can be added by including one or more BuildBucket", "whether a specified build config_name is candidate for etc build.", "--remote-patches format is not backwards compatible. if any(a.startswith('--remote-patches') for a", "of cbuildbot. Please run <br><code>repo sync chromiumos/chromite</code> and try again.", "buildbot.schedulers.trysched import TryBase from twisted.internet import defer, reactor, utils from", "import os import re import shutil import zlib from StringIO", "error_msgs: raise BadJobfile('\\n'.join(error_msgs)) def get_props(self, config, options): \"\"\"Overriding base class", "ValueError('Empty config name') if not self._ETC_TARGET_RE.match(config_name): raise ValueError('invalid etc config", "about the tryjob error.\"\"\" html_content = [] html_content.append('<html><body>') body =", "def loadGitilesChangeFile(self, poller, change): if len(change.files) != 1: # We", "this list will go to an 'etc' builder, if available.", "the consumer # knows its size. extra_args = 'z:' +", "= '%(repo)s/+/%(revision)s/%(path)s?format=text' @classmethod def updateJobDesc(cls, parsed_job): \"\"\"Ensure job description is", "repository=change.repository, changeids=[change.number]) yield self.create_buildset(ssid, parsed) @defer.inlineCallbacks def loadGitilesChangeFile(self, poller, change):", "from tryjob v.2 to ' 'v.3. Please run repo sync.')", "likely because <br>you are running an older version of cbuildbot.", "a JSON list of additional parameters to pass to the", "out. 
cbuildbot_configs: (CbuildbotConfigs) A configuration set instance. Any 'bot' request", "tryjob with name '%(name)s' failed the validation step. This is", "with name '%(name)s' failed the validation step. This is most", "config, self._PROPERTY_SOURCE) extra_args = options.get('extra_args') if extra_args: # This field", "`cbb_config` property. The callback transforms that property to an actual", "{'email': '<EMAIL>'}}]. - `cbb_config` property must be set to the", "translate --remote-patches from tryjob v.2 to ' 'v.3. Please run", "def __init__(self, configs, etc_builder=None): \"\"\"Holds base state of the master's", "= reply_to self.email_footer = email_footer self.cbb = cbuildbot_configs def startService(self):", "the user via email about the tryjob error.\"\"\" html_content =", "available. properties: See TryBase.__init__() \"\"\" TryBase.__init__(self, name, [], properties or", "list of additional parameters to pass to the tryjob. -", "int, True), ('slaves_request', list, False), ] error_msgs = [] for", "unsupported repository %s' % change.repository) # pylint: disable=W0631 file_contents =", "params_hook(params, _build): # Map `cbb_config` to a builder name. properties", "of these parameters are converted to BuildBot properties and referenced", "etc_builder (str): If not None, the name of the etc", "html_content.append('<html><body>') body = \"\"\" Your tryjob with name '%(name)s' failed", "try job scheduler: %s' % (e,)) import traceback traceback.print_exc() @defer.inlineCallbacks", "spill into extra args). _ETC_TARGET_RE = re.compile(r'^[a-zA-Z][\\w-]+\\w$') def __init__(self, configs,", "important) except Exception as e: log.msg('Exception in try job scheduler:", "ValidateEtcBuild(self, config_name): \"\"\"Tests whether a specified build config_name is candidate", "(%s): %s\" % (bot, e.message)) if error_msgs: raise BadJobfile('\\n'.join(error_msgs)) def", "referenced as such in other areas of code. 
The Git", "a ValueError if an etc build cannot be dispatched. \"\"\"", "(e.message,)) def validate_job(self, parsed_job): # A list of field description", "address to put in the 'Reply-To' email header field. email_footer:", "= etc_builder def AddBuildBucketHooks(self, c): \"\"\"Build mutation hook called via", "ValueError as e: error_msgs.append(\"Invalid 'etc' build (%s): %s\" % (bot,", "address to display as being sent from. reply_to: The email", "reply_to self.email_footer = email_footer self.cbb = cbuildbot_configs def startService(self): TryBase.startService(self)", "be # found in the LICENSE file. import base64 import", "Message from email.Utils import formatdate except ImportError: raise from buildbot.process.properties", "'etc' build (%s): %s\" % (bot, e.message)) if error_msgs: raise", "CrOSTryJobGit(TryBase): \"\"\"Poll a Git server to grab patches to try.\"\"\"", "\"\"\"Poll a Git server to grab patches to try.\"\"\" #", "to execute. for bot in parsed_job['bot']: if bot in self.cbb.configs:", "try: parsed = self.load_job(file_contents) self.validate_job(parsed) self.updateJobDesc(parsed) except BadJobfile as e:", "config_name): config = self.configs.get(config_name) if config: return config['_template'] or config_name", "isinstance(val, f_type): error_msgs.append('Option %s of wrong type!' % name) #", "re import shutil import zlib from StringIO import StringIO try:", "its size. extra_args = 'z:' + base64.b64encode(zlib.compress(json.dumps( extra_args))) props.setProperty('cbb_extra_args', extra_args,", "that this change came from. for poller in self.pollers: if", "any emails sent out. cbuildbot_configs: (CbuildbotConfigs) A configuration set instance.", "The Chromium Authors. All rights reserved. 
# Use of this", "formatdate(localtime=True) m['Subject'] = 'Tryjob failed validation' m['From'] = self.from_addr m['Reply-To']", "= yield self.master.db.sourcestamps.addSourceStamp( branch=change.branch, revision=change.revision, project=change.project, repository=change.repository, changeids=[change.number]) yield self.create_buildset(ssid,", "configuration set instance. Any 'bot' request outside of this list", "@classmethod def updateJobDesc(cls, parsed_job): \"\"\"Ensure job description is in the", "builder, if available. properties: See TryBase.__init__() \"\"\" TryBase.__init__(self, name, [],", "to V2.\"\"\" parsed_job.setdefault('extra_args', []).append('--remote-trybot') parsed_job['version'] = 2 def translate_v2_to_v3(parsed_job): \"\"\"Translate", "try.\"\"\" # Name of property source for generated properties. _PROPERTY_SOURCE", "not isinstance(val, f_type): error_msgs.append('Option %s of wrong type!' % name)", "that the master is expecting. _TRYJOB_FORMAT_VERSION = 3 # Functions", "put in the 'Reply-To' email header field. email_footer: The footer", "set, so code paths converge. \"\"\" def params_hook(params, _build): #", "etc config name (%s).' % (config_name,)) def translate_v1_to_v2(parsed_job): \"\"\"Translate tryjob", "version!' % str(translation_func)) def __init__(self, name, pollers, smtp_host, from_addr, reply_to,", "cls._TRANSLATION_FUNCS[parsed_job['version']] translation_func(parsed_job) if parsed_job['version'] <= prev_ver: raise AssertionError('translation function %s", "send_validation_fail_email(self, name, emails, error): \"\"\"Notify the user via email about", "build cannot be dispatched. \"\"\" if not self.etc_builder: raise ValueError('etc", "json import os import re import shutil import zlib from", "out error emails. from_addr: The email address to display as", "this build may run. - Additional BuildBot properties may be", "candidate for etc build. 
Raises a ValueError if an etc", "= [('name', basestring, True), ('user', basestring, True), ('email', list, True),", "not configured.') if not config_name: raise ValueError('Empty config name') if", "for bot in parsed_job['bot']: builder_name = self.cbb.GetBuilderForConfig(bot) log.msg(\"Creating '%s' try", "Copyright (c) 2012 The Chromium Authors. All rights reserved. #", "{}) self.pollers = pollers self.smtp_host = smtp_host self.from_addr = from_addr", "props.setProperty('slaves_request', options.get('slaves_request', []), self._PROPERTY_SOURCE) props.setProperty('cbb_config', config, self._PROPERTY_SOURCE) extra_args = options.get('extra_args')", "'z:' + base64.b64encode(zlib.compress(json.dumps( extra_args))) props.setProperty('cbb_extra_args', extra_args, self._PROPERTY_SOURCE) return props def", "can be quite large, and exceed BuildBot property limits. #", "% (parsed_job['user'], parsed_job['name']) for bot in parsed_job['bot']: builder_name = self.cbb.GetBuilderForConfig(bot)", "or more BuildBucket `changes` parameters: [{'author': {'email': '<EMAIL>'}}]. - `cbb_config`", "one or more BuildBucket `changes` parameters: [{'author': {'email': '<EMAIL>'}}]. -", "try: # Create a block to work around evil sys.modules", "file from # Gitiles. _GITILES_PATH_TMPL = '%(repo)s/+/%(revision)s/%(path)s?format=text' @classmethod def updateJobDesc(cls,", "m = Message() m.set_payload('<br><br>'.join(html_content), 'utf8') m.set_type(\"text/html\") m['Date'] = formatdate(localtime=True) m['Subject']", "is supplied, it will be ignored. - BuildBot changes can", "<br><code>repo sync chromiumos/chromite</code> and try again. If you still see<br>this", "str(e)) raise # The sourcestamp/buildsets created will be merge-able. ssid", "generated properties. _PROPERTY_SOURCE = 'Try Job' # The version of", "\"\"\"Ensure job description is in the format we expect.\"\"\" while", "paths converge. 
\"\"\" def params_hook(params, _build): # Map `cbb_config` to", "actual waterfall builder name by mapping it based on its", "e: log.msg('Exception in try job scheduler: %s' % (e,)) import", "not a list.') # Add mandatory properties to build. params['properties']", "- The build name doesn't begin with a flag ('--')", "elif not isinstance(val, f_type): error_msgs.append('Option %s of wrong type!' %", "build. try: self.cbb.ValidateEtcBuild(bot) except ValueError as e: error_msgs.append(\"Invalid 'etc' build", "more BuildBucket `changes` parameters: [{'author': {'email': '<EMAIL>'}}]. - `cbb_config` property", "still see<br>this message please contact <EMAIL>.<br> \"\"\" html_content.append(body % {'name':", "- `slaves_request` property (optional) may be a JSON list of", "V2.\"\"\" parsed_job.setdefault('extra_args', []).append('--remote-trybot') parsed_job['version'] = 2 def translate_v2_to_v3(parsed_job): \"\"\"Translate tryjob", "extra_args = 'z:' + base64.b64encode(zlib.compress(json.dumps( extra_args))) props.setProperty('cbb_extra_args', extra_args, self._PROPERTY_SOURCE) return", "import json import os import re import shutil import zlib", "to the build's cbuildbot config target. - `extra_args` property (optional)", "evil sys.modules manipulation in # email/__init__.py that triggers pylint false", "to pass to the tryjob. - `slaves_request` property (optional) may", "to V3.\"\"\" # V3 --remote-patches format is not backwards compatible.", "email_footer, cbuildbot_configs, properties=None): \"\"\"Initialize the class. Arguments: name: See TryBase.__init__().", "JSON: %s\" % (e.message,)) def validate_job(self, parsed_job): # A list", "cbuildbot_configs: (CbuildbotConfigs) A configuration set instance. 
Any 'bot' request outside", "# This field can be quite large, and exceed BuildBot", "except BadJobfile as e: self.send_validation_fail_email(parsed.setdefault('name', ''), parsed['email'], str(e)) raise #", "translation_func(parsed_job) if parsed_job['version'] <= prev_ver: raise AssertionError('translation function %s not", "__init__(self, name, pollers, smtp_host, from_addr, reply_to, email_footer, cbuildbot_configs, properties=None): \"\"\"Initialize", "LICENSE file. import base64 import json import os import re", "formatdate except ImportError: raise from buildbot.process.properties import Properties from buildbot.schedulers.trysched", "flag ('--') # - The build name doesn't contain spaces", "for poller in self.pollers: if not isinstance(poller, gitiles_poller.GitilesPoller): continue if", "'<EMAIL>'}}]. - `cbb_config` property must be set to the build's", "is not backwards compatible. if any(a.startswith('--remote-patches') for a in parsed_job.get('extra_args',", "`cbb_config` to a builder name. properties = params.get('properties', {}) config_name", "self.create_buildset(ssid, parsed) @defer.inlineCallbacks def loadGitilesChangeFile(self, poller, change): if len(change.files) !=", "user via email about the tryjob error.\"\"\" html_content = []", "pollers. smtp_host: The smtp host for sending out error emails.", "for sending out error emails. from_addr: The email address to", "modified file. path = self._GITILES_PATH_TMPL % { 'repo': poller.repo_path, 'revision':", "smtp host for sending out error emails. from_addr: The email", "parsed_job.setdefault('extra_args', []).append('--remote-trybot') parsed_job['version'] = 2 def translate_v2_to_v3(parsed_job): \"\"\"Translate tryjob desc", "Job' # The version of tryjob that the master is", "be added by including one or more BuildBucket `changes` parameters:", "parsed) @defer.inlineCallbacks def loadGitilesChangeFile(self, poller, change): if len(change.files) != 1:", "bots defined to execute. 
for bot in parsed_job['bot']: if bot", "self.reply_to = reply_to self.email_footer = email_footer self.cbb = cbuildbot_configs def", "())): raise BadJobfile('Cannot translate --remote-patches from tryjob v.2 to '", "of additional parameters to pass to the tryjob. - `slaves_request`", "None: if required: error_msgs.append('Option %s missing!' % name) elif not", "required in fields: val = parsed_job.get(name) if val is None:", "the tryjob error.\"\"\" html_content = [] html_content.append('<html><body>') body = \"\"\"", "fields: val = parsed_job.get(name) if val is None: if required:", "parsed_job['bot']: if bot in self.cbb.configs: continue # Assert that this", "= 'Tryjob failed validation' m['From'] = self.from_addr m['Reply-To'] = self.reply_to", "% name) elif not isinstance(val, f_type): error_msgs.append('Option %s of wrong", "from_addr, reply_to, email_footer, cbuildbot_configs, properties=None): \"\"\"Initialize the class. Arguments: name:", "in self.pollers: if not isinstance(poller, gitiles_poller.GitilesPoller): continue if poller.repo_url ==", "file_contents = yield self.loadGitilesChangeFile(poller, change) parsed = {} try: parsed", "other fields. if not isinstance(properties.get('extra_args', []), list): raise ValueError('`extra_args` property", "2 def translate_v2_to_v3(parsed_job): \"\"\"Translate tryjob desc from V2 to V3.\"\"\"", "we're an 'etc' job, we must have bots defined to", "it based on its config. If an 'etc' builder is", "mandatory properties to build. params['properties'] = properties c['buildbucket_params_hook'] = params_hook", "sending out error emails. 
from_addr: The email address to display", "change.repository: break else: raise BadJobfile( 'Received tryjob from unsupported repository", "change.files[0], } contents_b64 = yield poller.agent.request('GET', path, retry=5, protocol=StringResponse.Get) defer.returnValue(base64.b64decode(contents_b64))", "props = Properties() props.setProperty('slaves_request', options.get('slaves_request', []), self._PROPERTY_SOURCE) props.setProperty('cbb_config', config, self._PROPERTY_SOURCE)", "governed by a BSD-style license that can be # found", "of the etc builder. \"\"\" self.configs = configs self.etc_builder =", "= self.reply_to result = defer.Deferred() sender_factory = SMTPSenderFactory(self.from_addr, emails, StringIO(m.as_string()),", "source for generated properties. _PROPERTY_SOURCE = 'Try Job' # The", "validation step. This is most likely because <br>you are running", "builderNames=[builder_name], properties=self.get_props(bot, parsed_job))) return defer.DeferredList(dlist) def send_validation_fail_email(self, name, emails, error):", "properties to build. params['properties'] = properties c['buildbucket_params_hook'] = params_hook def", "# pylint: disable=E0611,F0401 from email.Message import Message from email.Utils import", "related configuration. configs (dict): A dictionary of all known CrOS", "Git poller also constructs the same property set, so code", "list of slaves on which this build may run. -", "= Message() m.set_payload('<br><br>'.join(html_content), 'utf8') m.set_type(\"text/html\") m['Date'] = formatdate(localtime=True) m['Subject'] =", "class CrOSTryJobGit(TryBase): \"\"\"Poll a Git server to grab patches to", "properties c['buildbucket_params_hook'] = params_hook def GetBuilderForConfig(self, config_name): config = self.configs.get(config_name)", "# knows its size. 
extra_args = 'z:' + base64.b64encode(zlib.compress(json.dumps( extra_args)))", "gotChange(self, change, important): try: yield self._gotChangeImpl(change, important) except Exception as", "self.pollers: if not isinstance(poller, gitiles_poller.GitilesPoller): continue if poller.repo_url == change.repository:", "parsed_job['version'] = 2 def translate_v2_to_v3(parsed_job): \"\"\"Translate tryjob desc from V2", "1: # We only accept changes with 1 diff file.", "via email about the tryjob error.\"\"\" html_content = [] html_content.append('<html><body>')", "%s missing!' % name) elif not isinstance(val, f_type): error_msgs.append('Option %s", "such in other areas of code. The Git poller also", "base state of the master's try job related configuration. configs", "callback transforms that property to an actual waterfall builder name", "converge. \"\"\" def params_hook(params, _build): # Map `cbb_config` to a", "builder name. properties = params.get('properties', {}) config_name = properties.get('cbb_config') if", "cannot be dispatched. \"\"\" if not self.etc_builder: raise ValueError('etc builder", "in self.cbb.configs: continue # Assert that this is a valid", "be ignored. - BuildBot changes can be added by including", "Empty `builder_name` parameter. If one is supplied, it will be", "etc build. Raises a ValueError if an etc build cannot", "def ValidateEtcBuild(self, config_name): \"\"\"Tests whether a specified build config_name is", "because <br>you are running an older version of cbuildbot. Please", "outside of this list will go to an 'etc' builder,", "not backwards compatible. if any(a.startswith('--remote-patches') for a in parsed_job.get('extra_args', ())):", "twisted.internet import defer, reactor, utils from twisted.mail.smtp import SMTPSenderFactory from", "[], properties or {}) self.pollers = pollers self.smtp_host = smtp_host", "be quite large, and exceed BuildBot property limits. # Compress", "Functions that translate from one tryjob version to another. 
_TRANSLATION_FUNCS", "into extra args). _ETC_TARGET_RE = re.compile(r'^[a-zA-Z][\\w-]+\\w$') def __init__(self, configs, etc_builder=None):", "raise BadJobfile( 'Received tryjob from unsupported repository %s' % change.repository)", "compatible. if any(a.startswith('--remote-patches') for a in parsed_job.get('extra_args', ())): raise BadJobfile('Cannot", "os import re import shutil import zlib from StringIO import", "and exceed BuildBot property limits. # Compress it, Base64 encode", "Exception as e: log.msg('Exception in try job scheduler: %s' %", "__init__(self, configs, etc_builder=None): \"\"\"Holds base state of the master's try", "target. - `extra_args` property (optional) may be a JSON list", "fields. if not isinstance(properties.get('extra_args', []), list): raise ValueError('`extra_args` property is", "version of cbuildbot. Please run <br><code>repo sync chromiumos/chromite</code> and try", "bot)) dlist.append(self.addBuildsetForSourceStamp(ssid=ssid, reason=buildset_name, external_idstring=buildset_name, builderNames=[builder_name], properties=self.get_props(bot, parsed_job))) return defer.DeferredList(dlist) def", "The sourcestamp/buildsets created will be merge-able. ssid = yield self.master.db.sourcestamps.addSourceStamp(", "config: return config['_template'] or config_name self.ValidateEtcBuild(config_name) return self.etc_builder def ValidateEtcBuild(self,", "will be as up-to-date as the Chromite pin. etc_builder (str):", ": translate_v1_to_v2, 2 : translate_v2_to_v3, } # Template path URL", "self.GetBuilderForConfig(config_name) # Validate other fields. if not isinstance(properties.get('extra_args', []), list):", "not isinstance(poller, gitiles_poller.GitilesPoller): continue if poller.repo_url == change.repository: break else:", "def updateJobDesc(cls, parsed_job): \"\"\"Ensure job description is in the format", "send the queue buildset.\"\"\" # Find poller that this change", "version of tryjob that the master is expecting. 
_TRYJOB_FORMAT_VERSION =", "'%s:%s' % (parsed_job['user'], parsed_job['name']) for bot in parsed_job['bot']: builder_name =", "builder is configured and the config name is unknown, it", "be added. NOTE: Internally, all of these parameters are converted", "builder. \"\"\" self.configs = configs self.etc_builder = etc_builder def AddBuildBucketHooks(self,", "# pylint: disable=W0631 file_contents = yield self.loadGitilesChangeFile(poller, change) parsed =", "self.etc_builder: raise ValueError('etc builder is not configured.') if not config_name:", "run repo sync.') parsed_job['version'] = 3 class CrOSTryJobGit(TryBase): \"\"\"Poll a", "traceback.print_exc() @defer.inlineCallbacks def _gotChangeImpl(self, change, _important): \"\"\"Process the received data", "data and send the queue buildset.\"\"\" # Find poller that", "state of the master's try job related configuration. configs (dict):", "= { 1 : translate_v1_to_v2, 2 : translate_v2_to_v3, } #", "a list.') if not isinstance(properties.get('slaves_request', []), list): raise ValueError('`slaves_request` is", "raise AssertionError('translation function %s not incrementing version!' % str(translation_func)) def", "build name doesn't begin with a flag ('--') # -", "parsed_job))) return defer.DeferredList(dlist) def send_validation_fail_email(self, name, emails, error): \"\"\"Notify the", "an actual waterfall builder name by mapping it based on", "raise BadJobfile( 'Try job with too many files %s' %", "common.twisted_util.response import StringResponse from master import gitiles_poller from master.try_job_base import", "# Functions that translate from one tryjob version to another.", "BadJobfile class CbuildbotConfigs(object): # Valid 'etc' builder targets. 
Specifically, this", "= [] for name, f_type, required in fields: val =", "parsed['email'], str(e)) raise # The sourcestamp/buildsets created will be merge-able.", "= options.get('extra_args') if extra_args: # This field can be quite", "with a flag ('--') # - The build name doesn't", "try job related configuration. configs (dict): A dictionary of all", "{ 1 : translate_v1_to_v2, 2 : translate_v2_to_v3, } # Template", "# V3 --remote-patches format is not backwards compatible. if any(a.startswith('--remote-patches')", "(optional) may be a JSON list of additional parameters to", "sent from. reply_to: The email address to put in the", "e: raise BadJobfile(\"Failed to parse job JSON: %s\" % (e.message,))", "If not None, the name of the etc builder. \"\"\"", "translate_v2_to_v3(parsed_job): \"\"\"Translate tryjob desc from V2 to V3.\"\"\" # V3", "params_hook def GetBuilderForConfig(self, config_name): config = self.configs.get(config_name) if config: return", "= re.compile(r'^[a-zA-Z][\\w-]+\\w$') def __init__(self, configs, etc_builder=None): \"\"\"Holds base state of", "list.') if not isinstance(properties.get('slaves_request', []), list): raise ValueError('`slaves_request` is not", "`changes` parameters: [{'author': {'email': '<EMAIL>'}}]. - `cbb_config` property must be", "not config_name: raise ValueError('Missing required `cbb_config` property.') params['builder_name'] = self.GetBuilderForConfig(config_name)", "master import gitiles_poller from master.try_job_base import BadJobfile class CbuildbotConfigs(object): #", "external_idstring=buildset_name, builderNames=[builder_name], properties=self.get_props(bot, parsed_job))) return defer.DeferredList(dlist) def send_validation_fail_email(self, name, emails,", "BuildBucket build takes the form: - Empty `builder_name` parameter. If", "BuildBucket `changes` parameters: [{'author': {'email': '<EMAIL>'}}]. 
- `cbb_config` property must", "import StringIO try: # Create a block to work around", "mapped to the 'etc' builder if possible. A tryserver BuildBucket", "extra_args, self._PROPERTY_SOURCE) return props def create_buildset(self, ssid, parsed_job): \"\"\"Overriding base", "import gitiles_poller from master.try_job_base import BadJobfile class CbuildbotConfigs(object): # Valid", "on its config. If an 'etc' builder is configured and", "if error_msgs: raise BadJobfile('\\n'.join(error_msgs)) def get_props(self, config, options): \"\"\"Overriding base", "in try job scheduler: %s' % (e,)) import traceback traceback.print_exc()", "%s' % (','.join(change.files))) # Load the contents of the modified", "self.cbb = cbuildbot_configs def startService(self): TryBase.startService(self) self.startConsumingChanges() @staticmethod def load_job(data):", "== change.repository: break else: raise BadJobfile( 'Received tryjob from unsupported", "step. This is most likely because <br>you are running an", "StringIO import StringIO try: # Create a block to work", "build. params['properties'] = properties c['buildbucket_params_hook'] = params_hook def GetBuilderForConfig(self, config_name):", "AddBuildBucketHooks(self, c): \"\"\"Build mutation hook called via BuildBucket when scheduling", "job, we must have bots defined to execute. for bot", "or {}) self.pollers = pollers self.smtp_host = smtp_host self.from_addr =", "error.\"\"\" html_content = [] html_content.append('<html><body>') body = \"\"\" Your tryjob", "valid 'etc' build. try: self.cbb.ValidateEtcBuild(bot) except ValueError as e: error_msgs.append(\"Invalid", "[] buildset_name = '%s:%s' % (parsed_job['user'], parsed_job['name']) for bot in", "tryserver BuildBucket build takes the form: - Empty `builder_name` parameter.", "name by mapping it based on its config. If an", "the form: - Empty `builder_name` parameter. 
If one is supplied,", "all of these parameters are converted to BuildBot properties and", "be a JSON list of slaves on which this build", "gitiles_poller from master.try_job_base import BadJobfile class CbuildbotConfigs(object): # Valid 'etc'", "so the consumer # knows its size. extra_args = 'z:'", "parsed_job): \"\"\"Ensure job description is in the format we expect.\"\"\"", "as such in other areas of code. The Git poller", "yield self._gotChangeImpl(change, important) except Exception as e: log.msg('Exception in try", "build config_name is candidate for etc build. Raises a ValueError", "args). _ETC_TARGET_RE = re.compile(r'^[a-zA-Z][\\w-]+\\w$') def __init__(self, configs, etc_builder=None): \"\"\"Holds base", "TryBase.__init__(self, name, [], properties or {}) self.pollers = pollers self.smtp_host", "'etc' builder targets. Specifically, this ensures: # - The build", "re.compile(r'^[a-zA-Z][\\w-]+\\w$') def __init__(self, configs, etc_builder=None): \"\"\"Holds base state of the", "def translate_v2_to_v3(parsed_job): \"\"\"Translate tryjob desc from V2 to V3.\"\"\" #", "\"\"\"Initialize the class. Arguments: name: See TryBase.__init__(). pollers: A list", "The footer to append to any emails sent out. cbuildbot_configs:", "self.updateJobDesc(parsed) except BadJobfile as e: self.send_validation_fail_email(parsed.setdefault('name', ''), parsed['email'], str(e)) raise", "load_job(data): try: return json.loads(data) except ValueError as e: raise BadJobfile(\"Failed", "'repo': poller.repo_path, 'revision': change.revision, 'path': change.files[0], } contents_b64 = yield", "properties: See TryBase.__init__() \"\"\" TryBase.__init__(self, name, [], properties or {})", "additional parameters to pass to the tryjob. - `slaves_request` property", "Authors. All rights reserved. 
# Use of this source code", "e.message)) if error_msgs: raise BadJobfile('\\n'.join(error_msgs)) def get_props(self, config, options): \"\"\"Overriding", "for %s\" % (builder_name, ssid, bot)) dlist.append(self.addBuildsetForSourceStamp(ssid=ssid, reason=buildset_name, external_idstring=buildset_name, builderNames=[builder_name],", "= properties c['buildbucket_params_hook'] = params_hook def GetBuilderForConfig(self, config_name): config =", "- `extra_args` property (optional) may be a JSON list of", "translation_func = cls._TRANSLATION_FUNCS[parsed_job['version']] translation_func(parsed_job) if parsed_job['version'] <= prev_ver: raise AssertionError('translation", "(CbuildbotConfigs) A configuration set instance. Any 'bot' request outside of", "ssid, bot)) dlist.append(self.addBuildsetForSourceStamp(ssid=ssid, reason=buildset_name, external_idstring=buildset_name, builderNames=[builder_name], properties=self.get_props(bot, parsed_job))) return defer.DeferredList(dlist)", "raise from buildbot.process.properties import Properties from buildbot.schedulers.trysched import TryBase from", "go to an 'etc' builder, if available. properties: See TryBase.__init__()", "as e: log.msg('Exception in try job scheduler: %s' % (e,))", "parsed_job): # A list of field description tuples of the", "# The version of tryjob that the master is expecting.", "\"\"\"Notify the user via email about the tryjob error.\"\"\" html_content", "the format we expect.\"\"\" while parsed_job['version'] < cls._TRYJOB_FORMAT_VERSION: prev_ver =", "of the master's try job related configuration. configs (dict): A", "If you still see<br>this message please contact <EMAIL>.<br> \"\"\" html_content.append(body", "list): raise ValueError('`slaves_request` is not a list.') # Add mandatory", "val is None: if required: error_msgs.append('Option %s missing!' % name)", "a valid 'etc' build. 
try: self.cbb.ValidateEtcBuild(bot) except ValueError as e:", "self.validate_job(parsed) self.updateJobDesc(parsed) except BadJobfile as e: self.send_validation_fail_email(parsed.setdefault('name', ''), parsed['email'], str(e))", "builder_name = self.cbb.GetBuilderForConfig(bot) log.msg(\"Creating '%s' try job(s) %s for %s\"", "the `cbb_config` property. The callback transforms that property to an", "('--') # - The build name doesn't contain spaces (to", "\"\"\" self.configs = configs self.etc_builder = etc_builder def AddBuildBucketHooks(self, c):", "config. If an 'etc' builder is configured and the config", "job description is in the format we expect.\"\"\" while parsed_job['version']", "format: # (name, type, required). fields = [('name', basestring, True),", "change, important): try: yield self._gotChangeImpl(change, important) except Exception as e:", "% str(translation_func)) def __init__(self, name, pollers, smtp_host, from_addr, reply_to, email_footer,", "loadGitilesChangeFile(self, poller, change): if len(change.files) != 1: # We only", "in the format we expect.\"\"\" while parsed_job['version'] < cls._TRYJOB_FORMAT_VERSION: prev_ver", "is not a list.') if not isinstance(properties.get('slaves_request', []), list): raise", "Name of property source for generated properties. _PROPERTY_SOURCE = 'Try", "in the `cbb_config` property. The callback transforms that property to", "Valid 'etc' builder targets. Specifically, this ensures: # - The", "except Exception as e: log.msg('Exception in try job scheduler: %s'", "# We only accept changes with 1 diff file. raise", "'bot' request outside of this list will go to an", "''), parsed['email'], str(e)) raise # The sourcestamp/buildsets created will be", "specified build config_name is candidate for etc build. 
Raises a", "def startService(self): TryBase.startService(self) self.startConsumingChanges() @staticmethod def load_job(data): try: return json.loads(data)", "ValueError('`extra_args` property is not a list.') if not isinstance(properties.get('slaves_request', []),", "`cbb_config` property must be set to the build's cbuildbot config", "config = self.configs.get(config_name) if config: return config['_template'] or config_name self.ValidateEtcBuild(config_name)", "reply_to, email_footer, cbuildbot_configs, properties=None): \"\"\"Initialize the class. Arguments: name: See", "poller.repo_path, 'revision': change.revision, 'path': change.files[0], } contents_b64 = yield poller.agent.request('GET',", "= defer.Deferred() sender_factory = SMTPSenderFactory(self.from_addr, emails, StringIO(m.as_string()), result) reactor.connectTCP(self.smtp_host, 25,", "A dictionary of all known CrOS configs. This will be", "to append to any emails sent out. cbuildbot_configs: (CbuildbotConfigs) A", "isinstance(properties.get('slaves_request', []), list): raise ValueError('`slaves_request` is not a list.') #", "it, Base64 encode it, and prefix it with \"z:\" so", "{}) config_name = properties.get('cbb_config') if not config_name: raise ValueError('Missing required", "as e: raise BadJobfile(\"Failed to parse job JSON: %s\" %", "doesn't contain spaces (to spill into extra args). _ETC_TARGET_RE =", "in fields: val = parsed_job.get(name) if val is None: if", "cbuildbot_configs, properties=None): \"\"\"Initialize the class. Arguments: name: See TryBase.__init__(). pollers:", "backwards compatible. if any(a.startswith('--remote-patches') for a in parsed_job.get('extra_args', ())): raise", "_TRANSLATION_FUNCS = { 1 : translate_v1_to_v2, 2 : translate_v2_to_v3, }", "BuildBot properties and referenced as such in other areas of", "will go to an 'etc' builder, if available. 
properties: See", "html_content.append(self.email_footer) m = Message() m.set_payload('<br><br>'.join(html_content), 'utf8') m.set_type(\"text/html\") m['Date'] = formatdate(localtime=True)", "\"\"\"Overriding base class method.\"\"\" dlist = [] buildset_name = '%s:%s'", "None, the name of the etc builder. \"\"\" self.configs =", "is unknown, it will be mapped to the 'etc' builder", "A list of field description tuples of the format: #", "# Assert that this is a valid 'etc' build. try:", "required: error_msgs.append('Option %s missing!' % name) elif not isinstance(val, f_type):", "ValueError('`slaves_request` is not a list.') # Add mandatory properties to", "# Template path URL component to retrieve the Base64 contents", "supplied, it will be ignored. - BuildBot changes can be", "it will be ignored. - BuildBot changes can be added", "name) # If we're an 'etc' job, we must have", "around evil sys.modules manipulation in # email/__init__.py that triggers pylint", "bot in self.cbb.configs: continue # Assert that this is a", "came from. for poller in self.pollers: if not isinstance(poller, gitiles_poller.GitilesPoller):", "else: raise BadJobfile( 'Received tryjob from unsupported repository %s' %", "builds. The cbuildbot config is specified in the `cbb_config` property.", "raise ValueError('invalid etc config name (%s).' % (config_name,)) def translate_v1_to_v2(parsed_job):", "may be a JSON list of additional parameters to pass", "parameters are converted to BuildBot properties and referenced as such", "not None, the name of the etc builder. \"\"\" self.configs", "self._gotChangeImpl(change, important) except Exception as e: log.msg('Exception in try job", "(parsed_job['user'], parsed_job['name']) for bot in parsed_job['bot']: builder_name = self.cbb.GetBuilderForConfig(bot) log.msg(\"Creating", "the queue buildset.\"\"\" # Find poller that this change came", "to an actual waterfall builder name by mapping it based", "instance. 
Any 'bot' request outside of this list will go", "from twisted.mail.smtp import SMTPSenderFactory from twisted.python import log from common.twisted_util.response", "# Map `cbb_config` to a builder name. properties = params.get('properties',", "is a valid 'etc' build. try: self.cbb.ValidateEtcBuild(bot) except ValueError as", "' 'v.3. Please run repo sync.') parsed_job['version'] = 3 class", "the etc builder. \"\"\" self.configs = configs self.etc_builder = etc_builder", "def translate_v1_to_v2(parsed_job): \"\"\"Translate tryjob desc from V1 to V2.\"\"\" parsed_job.setdefault('extra_args',", "self.master.db.sourcestamps.addSourceStamp( branch=change.branch, revision=change.revision, project=change.project, repository=change.repository, changeids=[change.number]) yield self.create_buildset(ssid, parsed) @defer.inlineCallbacks", "by a BSD-style license that can be # found in", "prefix it with \"z:\" so the consumer # knows its", "that property to an actual waterfall builder name by mapping", "we must have bots defined to execute. for bot in", "if bot in self.cbb.configs: continue # Assert that this is", "self.ValidateEtcBuild(config_name) return self.etc_builder def ValidateEtcBuild(self, config_name): \"\"\"Tests whether a specified", "return defer.DeferredList(dlist) def send_validation_fail_email(self, name, emails, error): \"\"\"Notify the user", "list of field description tuples of the format: # (name,", "if not isinstance(poller, gitiles_poller.GitilesPoller): continue if poller.repo_url == change.repository: break", "expect.\"\"\" while parsed_job['version'] < cls._TRYJOB_FORMAT_VERSION: prev_ver = parsed_job['version'] translation_func =", "'%(name)s' failed the validation step. 
This is most likely because", "Internally, all of these parameters are converted to BuildBot properties", "in parsed_job['bot']: builder_name = self.cbb.GetBuilderForConfig(bot) log.msg(\"Creating '%s' try job(s) %s", "self._PROPERTY_SOURCE) props.setProperty('cbb_config', config, self._PROPERTY_SOURCE) extra_args = options.get('extra_args') if extra_args: #", "buildset.\"\"\" # Find poller that this change came from. for", "= 3 # Functions that translate from one tryjob version", "from V2 to V3.\"\"\" # V3 --remote-patches format is not", "job(s) %s for %s\" % (builder_name, ssid, bot)) dlist.append(self.addBuildsetForSourceStamp(ssid=ssid, reason=buildset_name,", "params['builder_name'] = self.GetBuilderForConfig(config_name) # Validate other fields. if not isinstance(properties.get('extra_args',", "return self.etc_builder def ValidateEtcBuild(self, config_name): \"\"\"Tests whether a specified build", "All rights reserved. # Use of this source code is", "contain spaces (to spill into extra args). _ETC_TARGET_RE = re.compile(r'^[a-zA-Z][\\w-]+\\w$')", "as being sent from. reply_to: The email address to put", "[]).append('--remote-trybot') parsed_job['version'] = 2 def translate_v2_to_v3(parsed_job): \"\"\"Translate tryjob desc from", "parsed_job['version'] < cls._TRYJOB_FORMAT_VERSION: prev_ver = parsed_job['version'] translation_func = cls._TRANSLATION_FUNCS[parsed_job['version']] translation_func(parsed_job)", "self.cbb.configs: continue # Assert that this is a valid 'etc'", "ValueError as e: raise BadJobfile(\"Failed to parse job JSON: %s\"", "def load_job(data): try: return json.loads(data) except ValueError as e: raise", "html_content = [] html_content.append('<html><body>') body = \"\"\" Your tryjob with", "'Try job with too many files %s' % (','.join(change.files))) #", "if available. 
properties: See TryBase.__init__() \"\"\" TryBase.__init__(self, name, [], properties", "def send_validation_fail_email(self, name, emails, error): \"\"\"Notify the user via email", "other areas of code. The Git poller also constructs the", "properties = params.get('properties', {}) config_name = properties.get('cbb_config') if not config_name:", "will be merge-able. ssid = yield self.master.db.sourcestamps.addSourceStamp( branch=change.branch, revision=change.revision, project=change.project,", "%s' % change.repository) # pylint: disable=W0631 file_contents = yield self.loadGitilesChangeFile(poller,", "% (builder_name, ssid, bot)) dlist.append(self.addBuildsetForSourceStamp(ssid=ssid, reason=buildset_name, external_idstring=buildset_name, builderNames=[builder_name], properties=self.get_props(bot, parsed_job)))", "property source for generated properties. _PROPERTY_SOURCE = 'Try Job' #", "for bot in parsed_job['bot']: if bot in self.cbb.configs: continue #", "is not a list.') # Add mandatory properties to build.", "= smtp_host self.from_addr = from_addr self.reply_to = reply_to self.email_footer =", "with too many files %s' % (','.join(change.files))) # Load the", "from. reply_to: The email address to put in the 'Reply-To'", "= email_footer self.cbb = cbuildbot_configs def startService(self): TryBase.startService(self) self.startConsumingChanges() @staticmethod", "we expect.\"\"\" while parsed_job['version'] < cls._TRYJOB_FORMAT_VERSION: prev_ver = parsed_job['version'] translation_func", "in parsed_job['bot']: if bot in self.cbb.configs: continue # Assert that", "parsed_job.get('extra_args', ())): raise BadJobfile('Cannot translate --remote-patches from tryjob v.2 to", "pollers self.smtp_host = smtp_host self.from_addr = from_addr self.reply_to = reply_to", "a in parsed_job.get('extra_args', ())): raise BadJobfile('Cannot translate --remote-patches from tryjob", "host for sending out error emails. from_addr: The email address", "Chromium Authors. 
All rights reserved. # Use of this source", "while parsed_job['version'] < cls._TRYJOB_FORMAT_VERSION: prev_ver = parsed_job['version'] translation_func = cls._TRANSLATION_FUNCS[parsed_job['version']]", "= 'Try Job' # The version of tryjob that the", "[] for name, f_type, required in fields: val = parsed_job.get(name)", "def create_buildset(self, ssid, parsed_job): \"\"\"Overriding base class method.\"\"\" dlist =", "TryBase.__init__() \"\"\" TryBase.__init__(self, name, [], properties or {}) self.pollers =", "= [] html_content.append('<html><body>') body = \"\"\" Your tryjob with name", "options.get('extra_args') if extra_args: # This field can be quite large,", "must be set to the build's cbuildbot config target. -", "of tryjob that the master is expecting. _TRYJOB_FORMAT_VERSION = 3", "to grab patches to try.\"\"\" # Name of property source", "} # Template path URL component to retrieve the Base64", "queue buildset.\"\"\" # Find poller that this change came from.", "= parsed_job['version'] translation_func = cls._TRANSLATION_FUNCS[parsed_job['version']] translation_func(parsed_job) if parsed_job['version'] <= prev_ver:", "the contents of the modified file. path = self._GITILES_PATH_TMPL %", "m['Reply-To'] = self.reply_to result = defer.Deferred() sender_factory = SMTPSenderFactory(self.from_addr, emails,", "change): if len(change.files) != 1: # We only accept changes", "with \"z:\" so the consumer # knows its size. extra_args", "if required: error_msgs.append('Option %s missing!' % name) elif not isinstance(val,", "= self.GetBuilderForConfig(config_name) # Validate other fields. if not isinstance(properties.get('extra_args', []),", "may run. - Additional BuildBot properties may be added. NOTE:", "an older version of cbuildbot. Please run <br><code>repo sync chromiumos/chromite</code>", "be as up-to-date as the Chromite pin. 
etc_builder (str): If", "any(a.startswith('--remote-patches') for a in parsed_job.get('extra_args', ())): raise BadJobfile('Cannot translate --remote-patches", "poller.repo_url == change.repository: break else: raise BadJobfile( 'Received tryjob from", "Load the contents of the modified file. path = self._GITILES_PATH_TMPL", "%s\" % (builder_name, ssid, bot)) dlist.append(self.addBuildsetForSourceStamp(ssid=ssid, reason=buildset_name, external_idstring=buildset_name, builderNames=[builder_name], properties=self.get_props(bot,", "'Try Job' # The version of tryjob that the master", "translate_v2_to_v3, } # Template path URL component to retrieve the", "all known CrOS configs. This will be as up-to-date as", "for name, f_type, required in fields: val = parsed_job.get(name) if", "email about the tryjob error.\"\"\" html_content = [] html_content.append('<html><body>') body", "f_type, required in fields: val = parsed_job.get(name) if val is", "% name) # If we're an 'etc' job, we must", "specified in the `cbb_config` property. The callback transforms that property", "hook called via BuildBucket when scheduling builds. The cbuildbot config", "StringIO(m.as_string()), result) reactor.connectTCP(self.smtp_host, 25, sender_factory) @defer.inlineCallbacks def gotChange(self, change, important):", "property.') params['builder_name'] = self.GetBuilderForConfig(config_name) # Validate other fields. if not", "code is governed by a BSD-style license that can be", "Validate other fields. 
if not isinstance(properties.get('extra_args', []), list): raise ValueError('`extra_args`", "retrieve the Base64 contents of a file from # Gitiles.", "def GetBuilderForConfig(self, config_name): config = self.configs.get(config_name) if config: return config['_template']", "_gotChangeImpl(self, change, _important): \"\"\"Process the received data and send the", "revision=change.revision, project=change.project, repository=change.repository, changeids=[change.number]) yield self.create_buildset(ssid, parsed) @defer.inlineCallbacks def loadGitilesChangeFile(self,", "\"\"\"Overriding base class method.\"\"\" props = Properties() props.setProperty('slaves_request', options.get('slaves_request', []),", "etc_builder=None): \"\"\"Holds base state of the master's try job related", "%s' % (e,)) import traceback traceback.print_exc() @defer.inlineCallbacks def _gotChangeImpl(self, change,", "properties may be added. NOTE: Internally, all of these parameters", "set to the build's cbuildbot config target. 
- `extra_args` property", "= 'z:' + base64.b64encode(zlib.compress(json.dumps( extra_args))) props.setProperty('cbb_extra_args', extra_args, self._PROPERTY_SOURCE) return props", "if extra_args: # This field can be quite large, and", "a flag ('--') # - The build name doesn't contain", "props.setProperty('cbb_extra_args', extra_args, self._PROPERTY_SOURCE) return props def create_buildset(self, ssid, parsed_job): \"\"\"Overriding", "% (e.message,)) def validate_job(self, parsed_job): # A list of field", "self.load_job(file_contents) self.validate_job(parsed) self.updateJobDesc(parsed) except BadJobfile as e: self.send_validation_fail_email(parsed.setdefault('name', ''), parsed['email'],", "html_content.append(\"Extra error information:\") html_content.append(error.replace('\\n', '<br>\\n')) html_content.append(self.email_footer) m = Message() m.set_payload('<br><br>'.join(html_content),", "or config_name self.ValidateEtcBuild(config_name) return self.etc_builder def ValidateEtcBuild(self, config_name): \"\"\"Tests whether", "base64.b64encode(zlib.compress(json.dumps( extra_args))) props.setProperty('cbb_extra_args', extra_args, self._PROPERTY_SOURCE) return props def create_buildset(self, ssid,", "not isinstance(properties.get('extra_args', []), list): raise ValueError('`extra_args` property is not a", "added by including one or more BuildBucket `changes` parameters: [{'author':", "configs. This will be as up-to-date as the Chromite pin.", "except ValueError as e: error_msgs.append(\"Invalid 'etc' build (%s): %s\" %", "repository %s' % change.repository) # pylint: disable=W0631 file_contents = yield", "change came from. for poller in self.pollers: if not isinstance(poller,", "Properties from buildbot.schedulers.trysched import TryBase from twisted.internet import defer, reactor,", "contents of the modified file. 
path = self._GITILES_PATH_TMPL % {", "from master.try_job_base import BadJobfile class CbuildbotConfigs(object): # Valid 'etc' builder", "if not config_name: raise ValueError('Missing required `cbb_config` property.') params['builder_name'] =", "properties=None): \"\"\"Initialize the class. Arguments: name: See TryBase.__init__(). pollers: A", "and prefix it with \"z:\" so the consumer # knows", "parameter. If one is supplied, it will be ignored. -", "file. import base64 import json import os import re import", "expecting. _TRYJOB_FORMAT_VERSION = 3 # Functions that translate from one", "continue if poller.repo_url == change.repository: break else: raise BadJobfile( 'Received", "len(change.files) != 1: # We only accept changes with 1", "result) reactor.connectTCP(self.smtp_host, 25, sender_factory) @defer.inlineCallbacks def gotChange(self, change, important): try:", "This is most likely because <br>you are running an older", "isinstance(poller, gitiles_poller.GitilesPoller): continue if poller.repo_url == change.repository: break else: raise", "property set, so code paths converge. \"\"\" def params_hook(params, _build):", "(name, type, required). fields = [('name', basestring, True), ('user', basestring,", "try again. If you still see<br>this message please contact <EMAIL>.<br>", "This field can be quite large, and exceed BuildBot property", "m['Subject'] = 'Tryjob failed validation' m['From'] = self.from_addr m['Reply-To'] =", "builder name by mapping it based on its config. If", "is expecting. _TRYJOB_FORMAT_VERSION = 3 # Functions that translate from", "base class method.\"\"\" props = Properties() props.setProperty('slaves_request', options.get('slaves_request', []), self._PROPERTY_SOURCE)", "and referenced as such in other areas of code. 
The", "\"\"\" TryBase.__init__(self, name, [], properties or {}) self.pollers = pollers", "list): raise ValueError('`extra_args` property is not a list.') if not", "e: self.send_validation_fail_email(parsed.setdefault('name', ''), parsed['email'], str(e)) raise # The sourcestamp/buildsets created", "('bot', list, True), ('extra_args', list, False), ('version', int, True), ('slaves_request',", "traceback traceback.print_exc() @defer.inlineCallbacks def _gotChangeImpl(self, change, _important): \"\"\"Process the received", "raise ValueError('Empty config name') if not self._ETC_TARGET_RE.match(config_name): raise ValueError('invalid etc", "buildset_name = '%s:%s' % (parsed_job['user'], parsed_job['name']) for bot in parsed_job['bot']:", "# found in the LICENSE file. import base64 import json", "one tryjob version to another. _TRANSLATION_FUNCS = { 1 :", "files %s' % (','.join(change.files))) # Load the contents of the", "import shutil import zlib from StringIO import StringIO try: #", "scheduling builds. The cbuildbot config is specified in the `cbb_config`", "change.revision, 'path': change.files[0], } contents_b64 = yield poller.agent.request('GET', path, retry=5,", "not incrementing version!' % str(translation_func)) def __init__(self, name, pollers, smtp_host,", "- Additional BuildBot properties may be added. NOTE: Internally, all", "header field. email_footer: The footer to append to any emails", "property (optional) may be a JSON list of additional parameters", "running an older version of cbuildbot. Please run <br><code>repo sync", "to any emails sent out. 
cbuildbot_configs: (CbuildbotConfigs) A configuration set", "raise ValueError('`slaves_request` is not a list.') # Add mandatory properties", "BSD-style license that can be # found in the LICENSE", "A tryserver BuildBucket build takes the form: - Empty `builder_name`", "project=change.project, repository=change.repository, changeids=[change.number]) yield self.create_buildset(ssid, parsed) @defer.inlineCallbacks def loadGitilesChangeFile(self, poller,", "same property set, so code paths converge. \"\"\" def params_hook(params,", "BadJobfile as e: self.send_validation_fail_email(parsed.setdefault('name', ''), parsed['email'], str(e)) raise # The", "log.msg(\"Creating '%s' try job(s) %s for %s\" % (builder_name, ssid,", "description tuples of the format: # (name, type, required). fields", "see<br>this message please contact <EMAIL>.<br> \"\"\" html_content.append(body % {'name': name})", "property to an actual waterfall builder name by mapping it", "if config: return config['_template'] or config_name self.ValidateEtcBuild(config_name) return self.etc_builder def", "'<br>\\n')) html_content.append(self.email_footer) m = Message() m.set_payload('<br><br>'.join(html_content), 'utf8') m.set_type(\"text/html\") m['Date'] =", "email address to put in the 'Reply-To' email header field.", "\"\"\" html_content.append(body % {'name': name}) html_content.append(\"Extra error information:\") html_content.append(error.replace('\\n', '<br>\\n'))", "The build name doesn't contain spaces (to spill into extra", "name, pollers, smtp_host, from_addr, reply_to, email_footer, cbuildbot_configs, properties=None): \"\"\"Initialize the", "GetBuilderForConfig(self, config_name): config = self.configs.get(config_name) if config: return config['_template'] or", "%s\" % (e.message,)) def validate_job(self, parsed_job): # A list of", "doesn't begin with a flag ('--') # - The build", "of property source for generated properties. 
_PROPERTY_SOURCE = 'Try Job'", "(optional) may be a JSON list of slaves on which", "val = parsed_job.get(name) if val is None: if required: error_msgs.append('Option", "BadJobfile('Cannot translate --remote-patches from tryjob v.2 to ' 'v.3. Please", "self.configs = configs self.etc_builder = etc_builder def AddBuildBucketHooks(self, c): \"\"\"Build", "% (e,)) import traceback traceback.print_exc() @defer.inlineCallbacks def _gotChangeImpl(self, change, _important):", "# Find poller that this change came from. for poller", "file. path = self._GITILES_PATH_TMPL % { 'repo': poller.repo_path, 'revision': change.revision,", "Map `cbb_config` to a builder name. properties = params.get('properties', {})", "configs (dict): A dictionary of all known CrOS configs. This", "'Reply-To' email header field. email_footer: The footer to append to", "takes the form: - Empty `builder_name` parameter. If one is", "= pollers self.smtp_host = smtp_host self.from_addr = from_addr self.reply_to =", "smtp_host: The smtp host for sending out error emails. from_addr:", "name}) html_content.append(\"Extra error information:\") html_content.append(error.replace('\\n', '<br>\\n')) html_content.append(self.email_footer) m = Message()", "return json.loads(data) except ValueError as e: raise BadJobfile(\"Failed to parse", "- The build name doesn't contain spaces (to spill into", "is candidate for etc build. Raises a ValueError if an", "master is expecting. _TRYJOB_FORMAT_VERSION = 3 # Functions that translate", "ImportError: raise from buildbot.process.properties import Properties from buildbot.schedulers.trysched import TryBase", "field can be quite large, and exceed BuildBot property limits.", "extra args). 
_ETC_TARGET_RE = re.compile(r'^[a-zA-Z][\\w-]+\\w$') def __init__(self, configs, etc_builder=None): \"\"\"Holds", "{} try: parsed = self.load_job(file_contents) self.validate_job(parsed) self.updateJobDesc(parsed) except BadJobfile as", "\"\"\"Build mutation hook called via BuildBucket when scheduling builds. The", "ValueError if an etc build cannot be dispatched. \"\"\" if", "please contact <EMAIL>.<br> \"\"\" html_content.append(body % {'name': name}) html_content.append(\"Extra error", "SMTPSenderFactory from twisted.python import log from common.twisted_util.response import StringResponse from", "the master is expecting. _TRYJOB_FORMAT_VERSION = 3 # Functions that", "name of the etc builder. \"\"\" self.configs = configs self.etc_builder", "dlist = [] buildset_name = '%s:%s' % (parsed_job['user'], parsed_job['name']) for", "the LICENSE file. import base64 import json import os import", "= cls._TRANSLATION_FUNCS[parsed_job['version']] translation_func(parsed_job) if parsed_job['version'] <= prev_ver: raise AssertionError('translation function", "2 : translate_v2_to_v3, } # Template path URL component to", "if not isinstance(properties.get('slaves_request', []), list): raise ValueError('`slaves_request` is not a", "shutil import zlib from StringIO import StringIO try: # Create", "raise ValueError('Missing required `cbb_config` property.') params['builder_name'] = self.GetBuilderForConfig(config_name) # Validate", "defer.Deferred() sender_factory = SMTPSenderFactory(self.from_addr, emails, StringIO(m.as_string()), result) reactor.connectTCP(self.smtp_host, 25, sender_factory)", "unknown, it will be mapped to the 'etc' builder if", "builder if possible. A tryserver BuildBucket build takes the form:", "poller, change): if len(change.files) != 1: # We only accept", "cbuildbot config is specified in the `cbb_config` property. The callback", "the Chromite pin. etc_builder (str): If not None, the name", "slaves on which this build may run. 
- Additional BuildBot", "raise BadJobfile('\\n'.join(error_msgs)) def get_props(self, config, options): \"\"\"Overriding base class method.\"\"\"", "so code paths converge. \"\"\" def params_hook(params, _build): # Map", "twisted.python import log from common.twisted_util.response import StringResponse from master import", "params['properties'] = properties c['buildbucket_params_hook'] = params_hook def GetBuilderForConfig(self, config_name): config", "this ensures: # - The build name doesn't begin with", "can be added by including one or more BuildBucket `changes`", "ssid, parsed_job): \"\"\"Overriding base class method.\"\"\" dlist = [] buildset_name", "configured and the config name is unknown, it will be", "etc builder. \"\"\" self.configs = configs self.etc_builder = etc_builder def", "type, required). fields = [('name', basestring, True), ('user', basestring, True),", "its config. If an 'etc' builder is configured and the", "bot in parsed_job['bot']: builder_name = self.cbb.GetBuilderForConfig(bot) log.msg(\"Creating '%s' try job(s)", "(','.join(change.files))) # Load the contents of the modified file. path", "consumer # knows its size. extra_args = 'z:' + base64.b64encode(zlib.compress(json.dumps(", "props.setProperty('cbb_config', config, self._PROPERTY_SOURCE) extra_args = options.get('extra_args') if extra_args: # This", "list, True), ('bot', list, True), ('extra_args', list, False), ('version', int,", "begin with a flag ('--') # - The build name", "error_msgs.append(\"Invalid 'etc' build (%s): %s\" % (bot, e.message)) if error_msgs:", "CbuildbotConfigs(object): # Valid 'etc' builder targets. Specifically, this ensures: #", "message please contact <EMAIL>.<br> \"\"\" html_content.append(body % {'name': name}) html_content.append(\"Extra", "configs, etc_builder=None): \"\"\"Holds base state of the master's try job", "config_name is candidate for etc build. Raises a ValueError if", "another. 
_TRANSLATION_FUNCS = { 1 : translate_v1_to_v2, 2 : translate_v2_to_v3,", "failed validation' m['From'] = self.from_addr m['Reply-To'] = self.reply_to result =", "the validation step. This is most likely because <br>you are", "URL component to retrieve the Base64 contents of a file", "[]), self._PROPERTY_SOURCE) props.setProperty('cbb_config', config, self._PROPERTY_SOURCE) extra_args = options.get('extra_args') if extra_args:", "buildbot.process.properties import Properties from buildbot.schedulers.trysched import TryBase from twisted.internet import", "disable=W0631 file_contents = yield self.loadGitilesChangeFile(poller, change) parsed = {} try:", "name: See TryBase.__init__(). pollers: A list of job repo git", "transforms that property to an actual waterfall builder name by", "also constructs the same property set, so code paths converge.", "BadJobfile( 'Received tryjob from unsupported repository %s' % change.repository) #", "contact <EMAIL>.<br> \"\"\" html_content.append(body % {'name': name}) html_content.append(\"Extra error information:\")", "are running an older version of cbuildbot. Please run <br><code>repo", "TryBase.startService(self) self.startConsumingChanges() @staticmethod def load_job(data): try: return json.loads(data) except ValueError", "base class method.\"\"\" dlist = [] buildset_name = '%s:%s' %", "component to retrieve the Base64 contents of a file from", "desc from V2 to V3.\"\"\" # V3 --remote-patches format is", "'Tryjob failed validation' m['From'] = self.from_addr m['Reply-To'] = self.reply_to result", "to retrieve the Base64 contents of a file from #", "added. NOTE: Internally, all of these parameters are converted to", "up-to-date as the Chromite pin. etc_builder (str): If not None,", "be set to the build's cbuildbot config target. - `extra_args`", "< cls._TRYJOB_FORMAT_VERSION: prev_ver = parsed_job['version'] translation_func = cls._TRANSLATION_FUNCS[parsed_job['version']] translation_func(parsed_job) if", "See TryBase.__init__(). 
pollers: A list of job repo git pit", "information:\") html_content.append(error.replace('\\n', '<br>\\n')) html_content.append(self.email_footer) m = Message() m.set_payload('<br><br>'.join(html_content), 'utf8') m.set_type(\"text/html\")", "an etc build cannot be dispatched. \"\"\" if not self.etc_builder:", "property. The callback transforms that property to an actual waterfall", "the master's try job related configuration. configs (dict): A dictionary", "tryjob v.2 to ' 'v.3. Please run repo sync.') parsed_job['version']", "emails. from_addr: The email address to display as being sent", "(dict): A dictionary of all known CrOS configs. This will", "from twisted.python import log from common.twisted_util.response import StringResponse from master", "tryjob from unsupported repository %s' % change.repository) # pylint: disable=W0631", "BuildBot changes can be added by including one or more", "the tryjob. - `slaves_request` property (optional) may be a JSON", "it with \"z:\" so the consumer # knows its size.", "run <br><code>repo sync chromiumos/chromite</code> and try again. If you still", "self.configs.get(config_name) if config: return config['_template'] or config_name self.ValidateEtcBuild(config_name) return self.etc_builder", "build. Raises a ValueError if an etc build cannot be", "Gitiles. _GITILES_PATH_TMPL = '%(repo)s/+/%(revision)s/%(path)s?format=text' @classmethod def updateJobDesc(cls, parsed_job): \"\"\"Ensure job", "m['From'] = self.from_addr m['Reply-To'] = self.reply_to result = defer.Deferred() sender_factory", "3 class CrOSTryJobGit(TryBase): \"\"\"Poll a Git server to grab patches", "BadJobfile(\"Failed to parse job JSON: %s\" % (e.message,)) def validate_job(self,", "pylint: disable=W0631 file_contents = yield self.loadGitilesChangeFile(poller, change) parsed = {}", "an 'etc' job, we must have bots defined to execute.", "True), ('slaves_request', list, False), ] error_msgs = [] for name,", "that triggers pylint false positives. 
# pylint: disable=E0611,F0401 from email.Message", "f_type): error_msgs.append('Option %s of wrong type!' % name) # If", "html_content.append(body % {'name': name}) html_content.append(\"Extra error information:\") html_content.append(error.replace('\\n', '<br>\\n')) html_content.append(self.email_footer)", "the Base64 contents of a file from # Gitiles. _GITILES_PATH_TMPL", "# The sourcestamp/buildsets created will be merge-able. ssid = yield", "config['_template'] or config_name self.ValidateEtcBuild(config_name) return self.etc_builder def ValidateEtcBuild(self, config_name): \"\"\"Tests", "when scheduling builds. The cbuildbot config is specified in the", "as the Chromite pin. etc_builder (str): If not None, the", "[] html_content.append('<html><body>') body = \"\"\" Your tryjob with name '%(name)s'", "25, sender_factory) @defer.inlineCallbacks def gotChange(self, change, important): try: yield self._gotChangeImpl(change,", "# - The build name doesn't begin with a flag", "dispatched. \"\"\" if not self.etc_builder: raise ValueError('etc builder is not", "ensures: # - The build name doesn't begin with a", "SMTPSenderFactory(self.from_addr, emails, StringIO(m.as_string()), result) reactor.connectTCP(self.smtp_host, 25, sender_factory) @defer.inlineCallbacks def gotChange(self,", "positives. # pylint: disable=E0611,F0401 from email.Message import Message from email.Utils", "\"\"\" def params_hook(params, _build): # Map `cbb_config` to a builder", "import traceback traceback.print_exc() @defer.inlineCallbacks def _gotChangeImpl(self, change, _important): \"\"\"Process the", "from email.Utils import formatdate except ImportError: raise from buildbot.process.properties import", "this change came from. for poller in self.pollers: if not", "See TryBase.__init__() \"\"\" TryBase.__init__(self, name, [], properties or {}) self.pollers", "called via BuildBucket when scheduling builds. 
The cbuildbot config is", "class method.\"\"\" dlist = [] buildset_name = '%s:%s' % (parsed_job['user'],", "of wrong type!' % name) # If we're an 'etc'", "'%(repo)s/+/%(revision)s/%(path)s?format=text' @classmethod def updateJobDesc(cls, parsed_job): \"\"\"Ensure job description is in", "for generated properties. _PROPERTY_SOURCE = 'Try Job' # The version", "self.cbb.ValidateEtcBuild(bot) except ValueError as e: error_msgs.append(\"Invalid 'etc' build (%s): %s\"", "if not self._ETC_TARGET_RE.match(config_name): raise ValueError('invalid etc config name (%s).' %", "etc build cannot be dispatched. \"\"\" if not self.etc_builder: raise", "`builder_name` parameter. If one is supplied, it will be ignored.", "a block to work around evil sys.modules manipulation in #", "`slaves_request` property (optional) may be a JSON list of slaves", "of a file from # Gitiles. _GITILES_PATH_TMPL = '%(repo)s/+/%(revision)s/%(path)s?format=text' @classmethod", "error): \"\"\"Notify the user via email about the tryjob error.\"\"\"", "the 'Reply-To' email header field. email_footer: The footer to append", "too many files %s' % (','.join(change.files))) # Load the contents", "= params.get('properties', {}) config_name = properties.get('cbb_config') if not config_name: raise", "config_name self.ValidateEtcBuild(config_name) return self.etc_builder def ValidateEtcBuild(self, config_name): \"\"\"Tests whether a", "defer, reactor, utils from twisted.mail.smtp import SMTPSenderFactory from twisted.python import", "@staticmethod def load_job(data): try: return json.loads(data) except ValueError as e:", "params.get('properties', {}) config_name = properties.get('cbb_config') if not config_name: raise ValueError('Missing", "V3 --remote-patches format is not backwards compatible. if any(a.startswith('--remote-patches') for", "(%s).' 
% (config_name,)) def translate_v1_to_v2(parsed_job): \"\"\"Translate tryjob desc from V1", "config_name): \"\"\"Tests whether a specified build config_name is candidate for", "of field description tuples of the format: # (name, type,", "%s\" % (bot, e.message)) if error_msgs: raise BadJobfile('\\n'.join(error_msgs)) def get_props(self,", "True), ('user', basestring, True), ('email', list, True), ('bot', list, True),", "of slaves on which this build may run. - Additional", "BuildBot property limits. # Compress it, Base64 encode it, and", "cbuildbot_configs def startService(self): TryBase.startService(self) self.startConsumingChanges() @staticmethod def load_job(data): try: return", "the class. Arguments: name: See TryBase.__init__(). pollers: A list of", "Add mandatory properties to build. params['properties'] = properties c['buildbucket_params_hook'] =", "quite large, and exceed BuildBot property limits. # Compress it,", "be mapped to the 'etc' builder if possible. A tryserver", "defined to execute. for bot in parsed_job['bot']: if bot in", "builder is not configured.') if not config_name: raise ValueError('Empty config", "c['buildbucket_params_hook'] = params_hook def GetBuilderForConfig(self, config_name): config = self.configs.get(config_name) if", "try: return json.loads(data) except ValueError as e: raise BadJobfile(\"Failed to", "accept changes with 1 diff file. raise BadJobfile( 'Try job", "not a list.') if not isinstance(properties.get('slaves_request', []), list): raise ValueError('`slaves_request`", "If we're an 'etc' job, we must have bots defined", "# Load the contents of the modified file. path =", "m.set_type(\"text/html\") m['Date'] = formatdate(localtime=True) m['Subject'] = 'Tryjob failed validation' m['From']", "If one is supplied, it will be ignored. - BuildBot", "to parse job JSON: %s\" % (e.message,)) def validate_job(self, parsed_job):", "property limits. # Compress it, Base64 encode it, and prefix", "builder targets. 
Specifically, this ensures: # - The build name", "name is unknown, it will be mapped to the 'etc'", "'path': change.files[0], } contents_b64 = yield poller.agent.request('GET', path, retry=5, protocol=StringResponse.Get)", "pin. etc_builder (str): If not None, the name of the", "startService(self): TryBase.startService(self) self.startConsumingChanges() @staticmethod def load_job(data): try: return json.loads(data) except", "cbuildbot. Please run <br><code>repo sync chromiumos/chromite</code> and try again. If", "\"\"\"Tests whether a specified build config_name is candidate for etc", "is None: if required: error_msgs.append('Option %s missing!' % name) elif", "= SMTPSenderFactory(self.from_addr, emails, StringIO(m.as_string()), result) reactor.connectTCP(self.smtp_host, 25, sender_factory) @defer.inlineCallbacks def", "method.\"\"\" dlist = [] buildset_name = '%s:%s' % (parsed_job['user'], parsed_job['name'])", "class CbuildbotConfigs(object): # Valid 'etc' builder targets. Specifically, this ensures:", "The smtp host for sending out error emails. from_addr: The", "'etc' build. try: self.cbb.ValidateEtcBuild(bot) except ValueError as e: error_msgs.append(\"Invalid 'etc'", "= properties.get('cbb_config') if not config_name: raise ValueError('Missing required `cbb_config` property.')", "2012 The Chromium Authors. All rights reserved. # Use of", "of this list will go to an 'etc' builder, if", "def __init__(self, name, pollers, smtp_host, from_addr, reply_to, email_footer, cbuildbot_configs, properties=None):", "config is specified in the `cbb_config` property. The callback transforms", "version to another. _TRANSLATION_FUNCS = { 1 : translate_v1_to_v2, 2", "display as being sent from. reply_to: The email address to", "zlib from StringIO import StringIO try: # Create a block", "error_msgs.append('Option %s of wrong type!' 
% name) # If we're", "= self.load_job(file_contents) self.validate_job(parsed) self.updateJobDesc(parsed) except BadJobfile as e: self.send_validation_fail_email(parsed.setdefault('name', ''),", "`extra_args` property (optional) may be a JSON list of additional", "self._PROPERTY_SOURCE) return props def create_buildset(self, ssid, parsed_job): \"\"\"Overriding base class", "Please run <br><code>repo sync chromiumos/chromite</code> and try again. If you", "Find poller that this change came from. for poller in", "that can be # found in the LICENSE file. import", "grab patches to try.\"\"\" # Name of property source for", "error emails. from_addr: The email address to display as being", "'etc' builder is configured and the config name is unknown,", "list, True), ('extra_args', list, False), ('version', int, True), ('slaves_request', list,", "'v.3. Please run repo sync.') parsed_job['version'] = 3 class CrOSTryJobGit(TryBase):", "import SMTPSenderFactory from twisted.python import log from common.twisted_util.response import StringResponse", "if len(change.files) != 1: # We only accept changes with", "Chromite pin. etc_builder (str): If not None, the name of", "is not configured.') if not config_name: raise ValueError('Empty config name')", "if an etc build cannot be dispatched. \"\"\" if not", "# Valid 'etc' builder targets. Specifically, this ensures: # -", "property must be set to the build's cbuildbot config target.", "properties.get('cbb_config') if not config_name: raise ValueError('Missing required `cbb_config` property.') params['builder_name']", "self.email_footer = email_footer self.cbb = cbuildbot_configs def startService(self): TryBase.startService(self) self.startConsumingChanges()", "raise # The sourcestamp/buildsets created will be merge-able. ssid =", "\"\"\"Translate tryjob desc from V2 to V3.\"\"\" # V3 --remote-patches", "V2 to V3.\"\"\" # V3 --remote-patches format is not backwards", "in the LICENSE file. 
import base64 import json import os", "# Create a block to work around evil sys.modules manipulation", "to work around evil sys.modules manipulation in # email/__init__.py that", "return props def create_buildset(self, ssid, parsed_job): \"\"\"Overriding base class method.\"\"\"", "from # Gitiles. _GITILES_PATH_TMPL = '%(repo)s/+/%(revision)s/%(path)s?format=text' @classmethod def updateJobDesc(cls, parsed_job):", "disable=E0611,F0401 from email.Message import Message from email.Utils import formatdate except", "parsed_job['version'] translation_func = cls._TRANSLATION_FUNCS[parsed_job['version']] translation_func(parsed_job) if parsed_job['version'] <= prev_ver: raise", "important): try: yield self._gotChangeImpl(change, important) except Exception as e: log.msg('Exception", "possible. A tryserver BuildBucket build takes the form: - Empty", "Git server to grab patches to try.\"\"\" # Name of", "('email', list, True), ('bot', list, True), ('extra_args', list, False), ('version',", "from StringIO import StringIO try: # Create a block to", "body = \"\"\" Your tryjob with name '%(name)s' failed the", "path URL component to retrieve the Base64 contents of a", "self.cbb.GetBuilderForConfig(bot) log.msg(\"Creating '%s' try job(s) %s for %s\" % (builder_name,", "We only accept changes with 1 diff file. raise BadJobfile(", "{ 'repo': poller.repo_path, 'revision': change.revision, 'path': change.files[0], } contents_b64 =", "# If we're an 'etc' job, we must have bots", "self.send_validation_fail_email(parsed.setdefault('name', ''), parsed['email'], str(e)) raise # The sourcestamp/buildsets created will", "job JSON: %s\" % (e.message,)) def validate_job(self, parsed_job): # A", "properties. _PROPERTY_SOURCE = 'Try Job' # The version of tryjob", "c): \"\"\"Build mutation hook called via BuildBucket when scheduling builds.", "self.startConsumingChanges() @staticmethod def load_job(data): try: return json.loads(data) except ValueError as", "file. 
raise BadJobfile( 'Try job with too many files %s'", "by mapping it based on its config. If an 'etc'", "tryjob that the master is expecting. _TRYJOB_FORMAT_VERSION = 3 #", "is in the format we expect.\"\"\" while parsed_job['version'] < cls._TRYJOB_FORMAT_VERSION:", "format we expect.\"\"\" while parsed_job['version'] < cls._TRYJOB_FORMAT_VERSION: prev_ver = parsed_job['version']", "{'name': name}) html_content.append(\"Extra error information:\") html_content.append(error.replace('\\n', '<br>\\n')) html_content.append(self.email_footer) m =", "emails, error): \"\"\"Notify the user via email about the tryjob", "pass to the tryjob. - `slaves_request` property (optional) may be", "if any(a.startswith('--remote-patches') for a in parsed_job.get('extra_args', ())): raise BadJobfile('Cannot translate", "field. email_footer: The footer to append to any emails sent", "1 : translate_v1_to_v2, 2 : translate_v2_to_v3, } # Template path", "prev_ver = parsed_job['version'] translation_func = cls._TRANSLATION_FUNCS[parsed_job['version']] translation_func(parsed_job) if parsed_job['version'] <=", "The email address to put in the 'Reply-To' email header", "parsed = self.load_job(file_contents) self.validate_job(parsed) self.updateJobDesc(parsed) except BadJobfile as e: self.send_validation_fail_email(parsed.setdefault('name',", "many files %s' % (','.join(change.files))) # Load the contents of", "sender_factory = SMTPSenderFactory(self.from_addr, emails, StringIO(m.as_string()), result) reactor.connectTCP(self.smtp_host, 25, sender_factory) @defer.inlineCallbacks", "list, False), ] error_msgs = [] for name, f_type, required" ]
[ "9] # # Version: 1.0 # 12/22/18 by Jianfa #", "value in each row of a binary tree. # Example:", "= -sys.maxsize # Largest number of this row for i", "for i in range(len(children)): node = children[i] largest = max(node.val,", "\"\"\" :type root: TreeNode :rtype: List[int] \"\"\" if not root:", "You need to find the largest value in each row", "[1, 3, 9] # # Version: 1.0 # 12/22/18 by", "None # self.right = None class Solution: def largestValues(self, root):", "# / \\ \\ # 5 3 9 # Output:", "a binary tree node. # class TreeNode: # def __init__(self,", "self.left = None # self.right = None class Solution: def", "[root] res = [] while children: temp = [] #", "in each row of a binary tree. # Example: #", "TreeNode: # def __init__(self, x): # self.val = x #", "def __init__(self, x): # self.val = x # self.left =", "Solution: def largestValues(self, root): \"\"\" :type root: TreeNode :rtype: List[int]", "root: return [] children = [root] res = [] while", "for a binary tree node. # class TreeNode: # def", "of a binary tree. # Example: # Input: # 1", "tree node. # class TreeNode: # def __init__(self, x): #", "1 # / \\ # 3 2 # / \\", "= [] while children: temp = [] # Node of", "while children: temp = [] # Node of next row", "temp = [] # Node of next row largest =", ":type root: TreeNode :rtype: List[int] \"\"\" if not root: return", "binary tree node. # class TreeNode: # def __init__(self, x):", "# def __init__(self, x): # self.val = x # self.left", "root): \"\"\" :type root: TreeNode :rtype: List[int] \"\"\" if not", "row of a binary tree. 
# Example: # Input: #", "# Output: [1, 3, 9] # # Version: 1.0 #", "# Input: # 1 # / \\ # 3 2", "largest) if node.left: temp.append(node.left) if node.right: temp.append(node.right) res.append(largest) children =", "1.0 # 12/22/18 by Jianfa # ------------------------------ # Definition for", "x): # self.val = x # self.left = None #", "range(len(children)): node = children[i] largest = max(node.val, largest) if node.left:", "------------------------------ # 515. Find Largest Value in Each Tree Row", "self.right = None class Solution: def largestValues(self, root): \"\"\" :type", "3 2 # / \\ \\ # 5 3 9", "by Jianfa # ------------------------------ # Definition for a binary tree", "# ------------------------------ # Definition for a binary tree node. #", "Used for testing if __name__ == \"__main__\": test = Solution()", "Definition for a binary tree node. # class TreeNode: #", "a binary tree. # Example: # Input: # 1 #", "-sys.maxsize # Largest number of this row for i in", "not root: return [] children = [root] res = []", "node.right: temp.append(node.right) res.append(largest) children = temp return res # Used", "Output: [1, 3, 9] # # Version: 1.0 # 12/22/18", "[] while children: temp = [] # Node of next", "row largest = -sys.maxsize # Largest number of this row", "Example: # Input: # 1 # / \\ # 3", "next row largest = -sys.maxsize # Largest number of this", "children[i] largest = max(node.val, largest) if node.left: temp.append(node.left) if node.right:", "3, 9] # # Version: 1.0 # 12/22/18 by Jianfa", "[] # Node of next row largest = -sys.maxsize #", "# self.val = x # self.left = None # self.right", "= None # self.right = None class Solution: def largestValues(self,", "# You need to find the largest value in each", "i in range(len(children)): node = children[i] largest = max(node.val, largest)", "Tree Row # # Description: # You need to find", "temp return res # Used for testing if __name__ ==", "self.val = x # self.left = None # self.right =", "this row for i in 
range(len(children)): node = children[i] largest", "# 3 2 # / \\ \\ # 5 3", "test = Solution() # ------------------------------ # Summary: # BFS solution.", "if __name__ == \"__main__\": test = Solution() # ------------------------------ #", "Largest Value in Each Tree Row # # Description: #", "class TreeNode: # def __init__(self, x): # self.val = x", "temp.append(node.left) if node.right: temp.append(node.right) res.append(largest) children = temp return res", "Jianfa # ------------------------------ # Definition for a binary tree node.", "in range(len(children)): node = children[i] largest = max(node.val, largest) if", "of next row largest = -sys.maxsize # Largest number of", "2 # / \\ \\ # 5 3 9 #", "def largestValues(self, root): \"\"\" :type root: TreeNode :rtype: List[int] \"\"\"", "= max(node.val, largest) if node.left: temp.append(node.left) if node.right: temp.append(node.right) res.append(largest)", "max(node.val, largest) if node.left: temp.append(node.left) if node.right: temp.append(node.right) res.append(largest) children", "find the largest value in each row of a binary", "Value in Each Tree Row # # Description: # You", "9 # Output: [1, 3, 9] # # Version: 1.0", "largest = -sys.maxsize # Largest number of this row for", "# # Description: # You need to find the largest", "the largest value in each row of a binary tree.", "= [] # Node of next row largest = -sys.maxsize", "Largest number of this row for i in range(len(children)): node", "\\ \\ # 5 3 9 # Output: [1, 3,", "# 1 # / \\ # 3 2 # /", "# / \\ # 3 2 # / \\ \\", "children: temp = [] # Node of next row largest", "x # self.left = None # self.right = None class", "res = [] while children: temp = [] # Node", "/ \\ \\ # 5 3 9 # Output: [1,", "children = [root] res = [] while children: temp =", "each row of a binary tree. # Example: # Input:", "# ------------------------------ # 515. 
Find Largest Value in Each Tree", ":rtype: List[int] \"\"\" if not root: return [] children =", "res # Used for testing if __name__ == \"__main__\": test", "# Description: # You need to find the largest value", "[] children = [root] res = [] while children: temp", "5 3 9 # Output: [1, 3, 9] # #", "# class TreeNode: # def __init__(self, x): # self.val =", "for testing if __name__ == \"__main__\": test = Solution() #", "tree. # Example: # Input: # 1 # / \\", "= temp return res # Used for testing if __name__", "# Example: # Input: # 1 # / \\ #", "12/22/18 by Jianfa # ------------------------------ # Definition for a binary", "\"\"\" if not root: return [] children = [root] res", "Version: 1.0 # 12/22/18 by Jianfa # ------------------------------ # Definition", "# self.right = None class Solution: def largestValues(self, root): \"\"\"", "------------------------------ # Definition for a binary tree node. # class", "res.append(largest) children = temp return res # Used for testing", "in Each Tree Row # # Description: # You need", "Node of next row largest = -sys.maxsize # Largest number", "== \"__main__\": test = Solution() # ------------------------------ # Summary: #", "# # Version: 1.0 # 12/22/18 by Jianfa # ------------------------------", "__init__(self, x): # self.val = x # self.left = None", "root: TreeNode :rtype: List[int] \"\"\" if not root: return []", "= [root] res = [] while children: temp = []", "# self.left = None # self.right = None class Solution:", "to find the largest value in each row of a", "# 515. 
Find Largest Value in Each Tree Row #", "if not root: return [] children = [root] res =", "= None class Solution: def largestValues(self, root): \"\"\" :type root:", "3 9 # Output: [1, 3, 9] # # Version:", "Each Tree Row # # Description: # You need to", "number of this row for i in range(len(children)): node =", "Row # # Description: # You need to find the", "need to find the largest value in each row of", "# 12/22/18 by Jianfa # ------------------------------ # Definition for a", "largest value in each row of a binary tree. #", "\\ # 3 2 # / \\ \\ # 5", "node.left: temp.append(node.left) if node.right: temp.append(node.right) res.append(largest) children = temp return", "of this row for i in range(len(children)): node = children[i]", "largest = max(node.val, largest) if node.left: temp.append(node.left) if node.right: temp.append(node.right)", "None class Solution: def largestValues(self, root): \"\"\" :type root: TreeNode", "row for i in range(len(children)): node = children[i] largest =", "Input: # 1 # / \\ # 3 2 #", "\\ # 5 3 9 # Output: [1, 3, 9]", "# Version: 1.0 # 12/22/18 by Jianfa # ------------------------------ #", "largestValues(self, root): \"\"\" :type root: TreeNode :rtype: List[int] \"\"\" if", "return res # Used for testing if __name__ == \"__main__\":", "node. # class TreeNode: # def __init__(self, x): # self.val", "# Node of next row largest = -sys.maxsize # Largest", "TreeNode :rtype: List[int] \"\"\" if not root: return [] children", "515. 
Find Largest Value in Each Tree Row # #", "class Solution: def largestValues(self, root): \"\"\" :type root: TreeNode :rtype:", "if node.right: temp.append(node.right) res.append(largest) children = temp return res #", "List[int] \"\"\" if not root: return [] children = [root]", "temp.append(node.right) res.append(largest) children = temp return res # Used for", "Description: # You need to find the largest value in", "if node.left: temp.append(node.left) if node.right: temp.append(node.right) res.append(largest) children = temp", "testing if __name__ == \"__main__\": test = Solution() # ------------------------------", "= x # self.left = None # self.right = None", "# Definition for a binary tree node. # class TreeNode:", "# Used for testing if __name__ == \"__main__\": test =", "/ \\ # 3 2 # / \\ \\ #", "__name__ == \"__main__\": test = Solution() # ------------------------------ # Summary:", "# Largest number of this row for i in range(len(children)):", "\"__main__\": test = Solution() # ------------------------------ # Summary: # BFS", "return [] children = [root] res = [] while children:", "node = children[i] largest = max(node.val, largest) if node.left: temp.append(node.left)", "# 5 3 9 # Output: [1, 3, 9] #", "= children[i] largest = max(node.val, largest) if node.left: temp.append(node.left) if", "binary tree. # Example: # Input: # 1 # /", "Find Largest Value in Each Tree Row # # Description:", "children = temp return res # Used for testing if" ]
[ "== 'done': #print(\"ignoring line while done: %s\" % line) pass", "if is_command: #if command not in [ 'start_block', 'end_block' ]:", "class Example(object): def __init__(self): # things we'll figure out as", "== 'start_block': self.examples.append(self.current_example) self.current_example = Example() self.set_phase('example') else: raise Exception(\"unknown", "line)) def handle_module_description(self, line): if line.startswith(\"#\"): line = line.replace(\"#\",\"\") line", "2.0 (the \"License\"); # you may not use this file", "# commands must be in all caps. This is done", "raise Exception(\"unexpected end block without description\") else: raise Exception(\"unknown command:", "return (True, 'start_block', None) if '#============' in line.replace(\" \",\"\"): return", "self.phase == 'example_code': self.handle_example_code(self.current_example, line) elif self.phase == 'limbo': #print(\"ignoring", "Exception(\"unknown command: %s\" % command) elif self.phase == 'done': return", "== 'setup': self.set_phase('done') elif command == 'description': print(\"MOV!\") self.set_phase('example_description') elif", "colons in URLs and so on. 
print(\"REJECT: %s\" % tokens[0])", "command, rest) = self.load_command(line) print(\"%s line | %s\" % (self.count,", "starters return (False, None, None) if not line.startswith(\"#\"): # commands", "description lines must be comments self.handle_module_description(line) elif self.phase == 'example':", "commands\") elif self.phase == 'description': # module description lines must", "a command if '#------------' in line.replace(\" \",\"\"): return (True, 'start_block',", "print(\"---------------------------------------------------------\") self.current_example.name = rest elif command == 'setup': self.set_phase('done') elif", "example, line): # could be a comment or the code", "| %s\" % (self.count, self.phase)) print(\"---------------------------------------------------------\") @classmethod def from_file(cls, filename):", "def from_file(cls, filename): r = cls() r.name = os.path.basename(filename).replace(\".py\",\"\") print(\"=========================================================\")", "elif self.phase == 'description': # module description lines must be", "self.phase == 'example': # in example phase we can only", "a colon unless they are blocks or DESCRIPTION starters return", "None, None) if not line.startswith(\"#\"): # commands must be in", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "def __init__(self): # things which we'll figure out as we", "None) if '#============' in line.replace(\" \",\"\"): return (True, 'end_block', None)", "return (False, None, None) # at this point we are", "Exception(\"unknown command: %s\" % command) elif self.phase == 'example_description': #", "print(\"---------------------------------------------------------\") print(\"%s exmp | %s\" % (self.count, rest)) print(\"---------------------------------------------------------\") self.current_example.name", "'description': print(\"---------------------------------------------------------\") self.set_phase('description') elif command == 'end_block': 
raise Exception(\"unexpected end", "phase # by hitting the description command if command ==", "description command if command == 'example': print(\"---------------------------------------------------------\") print(\"%s exmp |", "self.phase == 'limbo': #print(\"ignoring line while in limbo: %s\" %", "line = line.replace(\"#\",\"\",1).strip() if line.startswith(\"DESCRIPTION\"): return (True, 'description', None) tokens", "example phase if command == 'start_block': self.set_phase('example') else: raise Exception(\"invalid", "scan the example self.name = \"\" self.purpose = \"\" self.provider_names", "or line.replace(\"#\",\"\").strip(): raise Exception(\"the example phase should be all commands\")", "def set_phase(self, phase): self.phase = phase print(\"---------------------------------------------------------\") print(\"%s phase |", "self.phase == 'example_code': # in example code phase we can", "if line.startswith(\"#\"): line = line.replace(\"#\",\"\") line = line.strip() print(\"%s desc", "Example() self.phase = 'module' self.count = 0 def set_phase(self, phase):", "0 def set_phase(self, phase): self.phase = phase print(\"---------------------------------------------------------\") print(\"%s phase", "%s\" % ('0', r.name)) data = open(filename).read().splitlines() for line in", "should be all commands\") elif self.phase == 'description': # module", "self.phase == 'description': # module description lines must be comments", "self.phase) return True # continue def handle_command(self, command, rest): #print(\"<PHASE:", "we are sure it is a command if '#------------' in", "in line.replace(\" \",\"\"): return (True, 'end_block', None) # throw away", "away the leading comment line = line.replace(\"#\",\"\",1).strip() if line.startswith(\"DESCRIPTION\"): return", "cls() r.name = os.path.basename(filename).replace(\".py\",\"\") print(\"=========================================================\") print(\"%s M | %s\" %", "(self.count, line)) def 
handle_module_description(self, line): if line.startswith(\"#\"): line = line.replace(\"#\",\"\")", "self.phase)) print(\"---------------------------------------------------------\") @classmethod def from_file(cls, filename): r = cls() r.name", "use this file except in compliance with the License. #", "% command) elif self.phase == 'example': # in example phase", "tokens = line.split(':', 1) command = tokens[0].replace(\"#\",\"\").strip().lower() rest = tokens[1].strip()", "| %s\" % (self.count, command, rest)) if command == 'module':", "(True, 'start_block', None) if '#============' in line.replace(\" \",\"\"): return (True,", "elif command == 'purpose': self.purpose = rest elif command ==", "until we find # another example start block if command", "== 'limbo': # in limbo, seeing a start block moves", "'example': print(\"---------------------------------------------------------\") print(\"%s exmp | %s\" % (self.count, rest)) print(\"---------------------------------------------------------\")", "'#------------' in line.replace(\" \",\"\"): return (True, 'start_block', None) if '#============'", "limbo: %s\" % line) pass elif self.phase == 'done': #print(\"ignoring", "'description': print(\"MOV!\") self.set_phase('example_description') elif command == 'see_files' or command ==", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "or '====' in line: pass elif not \":\" in line:", "License. 
# You may obtain a copy of the License", "if not line.startswith(\"#\") or line.replace(\"#\",\"\").strip(): raise Exception(\"the example phase should", "in ['start_block', 'end_block']: print(\"%s set | %-20s | %s\" %", "LLC, <<EMAIL>> # # Licensed under the Apache License, Version", "or line.replace(\"#\",\"\").strip(): raise Exception(\"the module phase should be all commands\")", "unless they are blocks or DESCRIPTION starters return (False, None,", "= line.split(\":\") if tokens[0].upper() != tokens[0]: # commands must be", "all commands\") elif self.phase == 'example_description': self.handle_example_description(self.current_example, line) elif self.phase", "def load_command(self, line): if \"DESCRIPTION\" in line or '----' in", "print(\"%s code | %s\" % (self.count, line)) def handle_module_description(self, line):", "under the License is distributed on an \"AS IS\" BASIS,", "% tokens[0]) return (False, None, None) # at this point", "rest = tokens[1].strip() return (True, command, rest) def handle_line(self, line):", "elif command == 'description': print(\"MOV!\") self.set_phase('example_description') elif command == 'see_files'", "License for the specific language governing permissions and # limitations", "self.current_example.name = rest elif command == 'setup': self.set_phase('done') elif command", "command == 'category': self.category = rest elif command == 'purpose':", "all commands\") elif self.phase == 'description': # module description lines", "command == 'start_block': self.examples.append(self.current_example) self.current_example = Example() self.set_phase('example') else: raise", "# module description lines must be comments self.handle_module_description(line) elif self.phase", "elif self.phase == 'done': #print(\"ignoring line while done: %s\" %", "# when we find the description command if command not", "!= tokens[0]: # commands must be in all caps. 
This", "True #print(\"PHASE=%s\" % self.phase) #print(\"LINE=%s\" % line) if self.phase ==", "self.load_command(line) print(\"%s line | %s\" % (self.count, line)) #if command", "module phase should be all commands\") elif self.phase == 'description':", "in description phase end block moves us into limbo until", "'====' in line: pass elif not \":\" in line: #", "this point we are sure it is a command if", "%s\" % tokens[0]) return (False, None, None) # at this", "the example self.name = \"\" self.purpose = \"\" self.provider_names =", "== 'example': # in example phase we can only move", "'setup': self.set_phase('done') elif command == 'description': print(\"MOV!\") self.set_phase('example_description') elif command", "rest elif command == 'purpose': self.purpose = rest elif command", "= [] self.examples = [] self.current_example = Example() self.phase =", "raise Exception(\"unknown phase: %s\" % self.phase) def handle_example_description(self, example, line):", "None, None) if \":\" in line: tokens = line.split(\":\") if", "== 'module': if not line.startswith(\"#\") or line.replace(\"#\",\"\").strip(): raise Exception(\"the module", "# limitations under the License. 
import os class Example(object): def", "into example phase by # hitting a start block if", "command) elif self.phase == 'done': return False else: raise Exception(\"unknown", "print(\"=========================================================\") print(\"%s M | %s\" % ('0', r.name)) data =", "if command == 'example': print(\"---------------------------------------------------------\") print(\"%s exmp | %s\" %", "x.strip() for x in rest.split(\",\")] else: raise Exception(\"unknown command: %s\"", "(True, 'description', None) tokens = line.split(':', 1) command = tokens[0].replace(\"#\",\"\").strip().lower()", "% self.phase) return True # continue def handle_command(self, command, rest):", "module description lines must be comments self.handle_module_description(line) elif self.phase ==", "% (self.count, line)) #if command == 'policy': # return False", "so on. print(\"REJECT: %s\" % tokens[0]) return (False, None, None)", "elif command == 'fyi': pass elif command == 'description': print(\"---------------------------------------------------------\")", "in compliance with the License. 
# You may obtain a", "command: %s\" % command) elif self.phase == 'example': # in", "if self.phase == 'module': # from module mode the only", "REST: %s>\" % (self.phase, command, rest)) if self.phase == 'done':", "software # distributed under the License is distributed on an", "= Example() self.phase = 'module' self.count = 0 def set_phase(self,", "\",\"\"): return (True, 'start_block', None) if '#============' in line.replace(\" \",\"\"):", "pass elif not \":\" in line: # commands must contain", "| %s\" % (self.count, line)) #if command == 'policy': #", "line = line.replace(\"#\",\"\") line = line.strip() if line: print(\"%s mdesc", "block if command == 'end_block': self.set_phase('limbo') else: raise Exception(\"invalid command:", "# hitting a start block if command == 'start_block': self.examples.append(self.current_example)", "line or '----' in line or '====' in line: pass", "into limbo until we find # another example start block", "Exception(\"unknown phase: %s\" % self.phase) def handle_example_description(self, example, line): #", "out as we scan the example self.name = \"\" self.purpose", "if command == 'module': pass elif command == 'start_block': pass", "raise Exception(\"unknown phase: %s\" % self.phase) return True # continue", "commands must be in comments return (False, None, None) if", "self.phase == 'example': if not line.startswith(\"#\") or line.replace(\"#\",\"\").strip(): raise Exception(\"the", "moves us into example phase if command == 'start_block': self.set_phase('example')", "raise Exception(\"unknown command: %s\" % command) elif self.phase == 'done':", "%s\" % command) elif self.phase == 'example_description': # in example", "raise Exception(\"invalid command: %s\" % command) elif self.phase == 'example':", "are blocks or DESCRIPTION starters return (False, None, None) if", "#print(\"ignoring line while done: %s\" % line) pass else: raise", "self.current_example.see_files = [ x.strip() for x in rest.split(\",\")] else: raise", "elif 
self.phase == 'description': # in description phase end block", "elif self.phase == 'example_description': self.handle_example_description(self.current_example, line) elif self.phase == 'example_code':", "comments self.handle_module_description(line) elif self.phase == 'example': if not line.startswith(\"#\") or", "commands must be in all caps. This is done #", "self.phase == 'module': # from module mode the only state", "example description phase # by hitting the description command if", "(command, rest)) self.handle_command(command, rest) return True #print(\"PHASE=%s\" % self.phase) #print(\"LINE=%s\"", "point we are sure it is a command if '#------------'", "code phase # by hitting an end block if command", "raise Exception(\"unknown command: %s\" % command) elif self.phase == 'example_code':", "== 'category': self.category = rest elif command == 'purpose': self.purpose", "__init__(self): # things we'll figure out as we scan an", "if command == 'start_block': self.set_phase('example') else: raise Exception(\"invalid command: %s\"", "module mode the only state transition is into module_description mode", "if command == 'start_block': self.examples.append(self.current_example) self.current_example = Example() self.set_phase('example') else:", "# print(\"keyword: %s => %s\" % (command, rest)) self.handle_command(command, rest)", "for x in rest.split(\",\") ] elif command == 'providers': self.providers", "]: # print(\"keyword: %s => %s\" % (command, rest)) self.handle_command(command,", "for line in data: if not r.handle_line(line): break return r", "self.count + 1 (is_command, command, rest) = self.load_command(line) print(\"%s line", "'end_block': raise Exception(\"unexpected end block without description\") else: raise Exception(\"unknown", "self.examples.append(self.current_example) self.current_example = Example() self.set_phase('example') else: raise Exception(\"unknown command: %s\"", "in line: tokens = line.split(\":\") if tokens[0].upper() != tokens[0]: 
#", "we scan an example self.name = \"\" self.see_files = []", "phase we can only move into example description phase #", "can only move into example code phase # by hitting", "line.replace(\"#\",\"\") line = line.strip() if line: print(\"%s mdesc | %s\"", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "'start_block': pass elif command == 'category': self.category = rest elif", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "raise Exception(\"the example phase should be all commands\") elif self.phase", "self.phase == 'example_description': self.handle_example_description(self.current_example, line) elif self.phase == 'example_code': self.handle_example_code(self.current_example,", "# continue def handle_command(self, command, rest): #print(\"<PHASE: %s, COMMAND: %s,", "| %-20s | %s\" % (self.count, command, rest)) if command", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "leading comment line = line.replace(\"#\",\"\",1).strip() if line.startswith(\"DESCRIPTION\"): return (True, 'description',", "example phase should be all commands\") elif self.phase == 'example_description':", "else: raise Exception(\"unknown phase: %s\" % self.phase) def handle_example_description(self, example,", "\"DESCRIPTION\" in line or '----' in line or '====' in", "to in writing, software # distributed under the License is", "rest.split(\",\") ] elif command == 'fyi': pass elif command ==", "# throw away the leading comment line = line.replace(\"#\",\"\",1).strip() if", "r.name)) data = open(filename).read().splitlines() for line in data: if not", "# See the License for the specific language governing permissions", "command == 'module': pass elif command == 'start_block': pass elif", "'#============' in line.replace(\" \",\"\"): return (True, 'end_block', None) # throw", "self.code = [] class Record(object): def __init__(self): # things which", "we want to keep both if line.startswith(\"#\"): line = line.replace(\"#\",\"\")", "%s, REST: %s>\" % (self.phase, command, rest)) if self.phase ==", "'limbo': #print(\"ignoring line while in limbo: %s\" % line) pass", "under the License. 
import os class Example(object): def __init__(self): #", "or agreed to in writing, software # distributed under the", "'end_block', None) # throw away the leading comment line =", "<<EMAIL>> # # Licensed under the Apache License, Version 2.0", "required by applicable law or agreed to in writing, software", "None, None) # at this point we are sure it", "%s\" % command) elif self.phase == 'example_code': # in example", "command == 'end_block': raise Exception(\"unexpected end block without description\") else:", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "line.startswith(\"#\") or line.replace(\"#\",\"\").strip(): raise Exception(\"the example phase should be all", "== 'example_description': # in example description phase we can only", "\"\" self.provider_names = [] self.related_modules = [] self.category = \"\"", "r = cls() r.name = os.path.basename(filename).replace(\".py\",\"\") print(\"=========================================================\") print(\"%s M |", "with the License. 
# You may obtain a copy of", "command, rest) def handle_line(self, line): self.count = self.count + 1", "= Example() self.set_phase('example') else: raise Exception(\"unknown command: %s\" % command)", "'description': # module description lines must be comments self.handle_module_description(line) elif", "# in example description phase we can only move into", "phase): self.phase = phase print(\"---------------------------------------------------------\") print(\"%s phase | %s\" %", "break return r def load_command(self, line): if \"DESCRIPTION\" in line", "def handle_example_code(self, example, line): line = line.rstrip() example.code.append(line) print(\"%s code", "self.examples = [] self.current_example = Example() self.phase = 'module' self.count", "print(\"keyword: %s => %s\" % (command, rest)) self.handle_command(command, rest) return", "% line) pass elif self.phase == 'done': #print(\"ignoring line while", "if self.phase == 'done': return False if self.phase == 'module':", "code example, we want to keep both if line.startswith(\"#\"): line", "COMMAND: %s, REST: %s>\" % (self.phase, command, rest)) if self.phase", "= self.count + 1 (is_command, command, rest) = self.load_command(line) print(\"%s", "be a comment or the code example, we want to", "pass else: raise Exception(\"unknown phase: %s\" % self.phase) return True", "example.description.append(line) def handle_example_code(self, example, line): line = line.rstrip() example.code.append(line) print(\"%s", "lines must be comments self.handle_module_description(line) elif self.phase == 'example': if", "end block if command == 'end_block': print(\"-------\") self.set_phase('example_code') else: raise", "is_command: #if command not in [ 'start_block', 'end_block' ]: #", "== 'limbo': #print(\"ignoring line while in limbo: %s\" % line)", "compliance with the License. # You may obtain a copy", "and so on. 
print(\"REJECT: %s\" % tokens[0]) return (False, None,", "start block moves us into example phase if command ==", "agreed to in writing, software # distributed under the License", "Exception(\"unknown command: %s\" % command) elif self.phase == 'description': #", "transition is into module_description mode # when we find the", "command not in [ 'start_block', 'end_block' ]: # print(\"keyword: %s", "description phase end block moves us into limbo until we", "else: raise Exception(\"invalid command: %s\" % command) elif self.phase ==", "(self.count, rest)) print(\"---------------------------------------------------------\") self.current_example.name = rest elif command == 'setup':", "line): line = line.rstrip() example.code.append(line) print(\"%s code | %s\" %", "distributed under the License is distributed on an \"AS IS\"", "self.phase == 'done': #print(\"ignoring line while done: %s\" % line)", "# in description phase end block moves us into limbo", "Exception(\"unexpected end block without description\") else: raise Exception(\"unknown command: %s\"", "line = line.replace(\"#\",\"\") line = line.strip() print(\"%s desc | %s\"", "the License. 
import os class Example(object): def __init__(self): # things", "self.handle_example_description(self.current_example, line) elif self.phase == 'example_code': self.handle_example_code(self.current_example, line) elif self.phase", "% (self.count, rest)) print(\"---------------------------------------------------------\") self.current_example.name = rest elif command ==", "example code phase we can only move back into example", "= [] self.category = \"\" self.description = [] self.examples =", "== 'example_code': # in example code phase we can only", "[] self.examples = [] self.current_example = Example() self.phase = 'module'", "(True, command, rest) def handle_line(self, line): self.count = self.count +", "= cls() r.name = os.path.basename(filename).replace(\".py\",\"\") print(\"=========================================================\") print(\"%s M | %s\"", "% (self.count, line)) def handle_module_description(self, line): if line.startswith(\"#\"): line =", "print(\"---------------------------------------------------------\") @classmethod def from_file(cls, filename): r = cls() r.name =", "class Record(object): def __init__(self): # things which we'll figure out", "(False, None, None) # at this point we are sure", "be in comments return (False, None, None) if \":\" in", "in URLs and so on. print(\"REJECT: %s\" % tokens[0]) return", "1) command = tokens[0].replace(\"#\",\"\").strip().lower() rest = tokens[1].strip() return (True, command,", "rest)) print(\"---------------------------------------------------------\") self.current_example.name = rest elif command == 'setup': self.set_phase('done')", "block if command == 'start_block': self.examples.append(self.current_example) self.current_example = Example() self.set_phase('example')", "self.set_phase('done') elif command == 'description': print(\"MOV!\") self.set_phase('example_description') elif command ==", "express or implied. # See the License for the specific", "except in compliance with the License. 
# You may obtain", "line) if self.phase == 'module': if not line.startswith(\"#\") or line.replace(\"#\",\"\").strip():", "phase print(\"---------------------------------------------------------\") print(\"%s phase | %s\" % (self.count, self.phase)) print(\"---------------------------------------------------------\")", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "command: %s\" % command) elif self.phase == 'limbo': # in", "line: # commands must contain a colon unless they are", "not use this file except in compliance with the License.", "rest.split(\",\")] else: raise Exception(\"unknown command: %s\" % command) elif self.phase", "end block without description\") else: raise Exception(\"unknown command: %s\" %", "into module_description mode # when we find the description command", "writing, software # distributed under the License is distributed on", "% ('0', r.name)) data = open(filename).read().splitlines() for line in data:", "command, rest): #print(\"<PHASE: %s, COMMAND: %s, REST: %s>\" % (self.phase,", "command == 'purpose': self.purpose = rest elif command == 'related':", "print(\"%s desc | %s\" % (self.count, line)) example.description.append(line) def handle_example_code(self,", "return (True, 'description', None) tokens = line.split(':', 1) command =", "the only state transition is into module_description mode # when", "you may not use this file except in compliance with", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "= [] self.code = [] class Record(object): def __init__(self): #", "command == 'end_block': print(\"-------\") self.set_phase('example_code') else: raise Exception(\"unknown command: %s\"", "== 'done': return False else: raise Exception(\"unknown phase: %s\" %", "want to keep both if line.startswith(\"#\"): line = line.replace(\"#\",\"\") line", "figure out as we scan the example self.name = \"\"", "in example code phase we can only move back into", "end block moves us into limbo until we 
find #", "'purpose': self.purpose = rest elif command == 'related': self.related_modules =", "elif self.phase == 'example_description': # in example description phase we", "command == 'providers': self.providers = [ x.strip() for x in", "self.phase == 'description': # in description phase end block moves", "== 'start_block': self.set_phase('example') else: raise Exception(\"invalid command: %s\" % command)", "self.related_modules = [] self.category = \"\" self.description = [] self.examples", "blocks or DESCRIPTION starters return (False, None, None) if not", "%s\" % line) pass else: raise Exception(\"unknown phase: %s\" %", "if command == 'end_block': self.set_phase('limbo') else: raise Exception(\"invalid command: %s\"", "in example description phase we can only move into example", "False if is_command: #if command not in [ 'start_block', 'end_block'", "continue def handle_command(self, command, rest): #print(\"<PHASE: %s, COMMAND: %s, REST:", "\":\" in line: tokens = line.split(\":\") if tokens[0].upper() != tokens[0]:", "= [ x.strip() for x in rest.split(\",\") ] elif command", "] elif command == 'fyi': pass elif command == 'description':", "| %s\" % (self.count, rest)) print(\"---------------------------------------------------------\") self.current_example.name = rest elif", "self.current_example = Example() self.phase = 'module' self.count = 0 def", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "['start_block', 'end_block']: print(\"%s set | %-20s | %s\" % (self.count,", "not line.startswith(\"#\") or line.replace(\"#\",\"\").strip(): raise Exception(\"the example phase should be", "== 'end_block': self.set_phase('limbo') else: raise Exception(\"invalid command: %s\" % command)", "move into example code phase # by hitting an end", "(self.count, self.phase)) print(\"---------------------------------------------------------\") @classmethod def from_file(cls, filename): r = cls()", "self.phase) #print(\"LINE=%s\" % line) if self.phase == 'module': if not", "it is a command if '#------------' in line.replace(\" \",\"\"): return", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "done # so we don't get confused by colons in", "pass elif command == 'category': self.category = rest elif command", "rest) = self.load_command(line) print(\"%s line | %s\" % (self.count, line))", "self.handle_module_description(line) elif self.phase == 'example': if not line.startswith(\"#\") or line.replace(\"#\",\"\").strip():", "line.strip() if line: print(\"%s mdesc | %s\" % (self.count, line))", "command == 'see_files' or command == 'see_file': self.current_example.see_files = [", "exmp | %s\" % (self.count, rest)) print(\"---------------------------------------------------------\") self.current_example.name = rest", "if \"DESCRIPTION\" in line or '----' in line or '===='", "print(\"REJECT: %s\" % tokens[0]) return (False, None, None) # at", "line.replace(\" \",\"\"): return (True, 'start_block', None) if '#============' in line.replace(\"", "% command) elif self.phase == 'example_code': # in example code", "line while in limbo: %s\" % line) pass elif self.phase", "== 'module': pass elif command == 'start_block': pass elif command", "'----' in line or '====' in line: pass elif not", "def handle_command(self, command, rest): #print(\"<PHASE: %s, COMMAND: %s, REST: %s>\"", "code | %s\" % (self.count, line)) def handle_module_description(self, line): if", "| 
%s\" % (self.count, line)) def handle_module_description(self, line): if line.startswith(\"#\"):", "colon unless they are blocks or DESCRIPTION starters return (False,", "command: %s\" % command) elif self.phase == 'example_code': # in", "phase we can only move into example code phase #", "= [] class Record(object): def __init__(self): # things which we'll", "load_command(self, line): if \"DESCRIPTION\" in line or '----' in line", "line in data: if not r.handle_line(line): break return r def", "None) # at this point we are sure it is", "== 'description': # module description lines must be comments self.handle_module_description(line)", "elif command == 'description': print(\"---------------------------------------------------------\") self.set_phase('description') elif command == 'end_block':", "line.replace(\" \",\"\"): return (True, 'end_block', None) # throw away the", "'description', None) tokens = line.split(':', 1) command = tokens[0].replace(\"#\",\"\").strip().lower() rest", "keep both if line.startswith(\"#\"): line = line.replace(\"#\",\"\") line = line.strip()", "if command not in ['start_block', 'end_block']: print(\"%s set | %-20s", "% command) elif self.phase == 'description': # in description phase", "('0', r.name)) data = open(filename).read().splitlines() for line in data: if", "[ 'start_block', 'end_block' ]: # print(\"keyword: %s => %s\" %", "== 'example_code': self.handle_example_code(self.current_example, line) elif self.phase == 'limbo': #print(\"ignoring line", "self.providers = [ x.strip() for x in rest.split(\",\") ] elif", "line): if line.startswith(\"#\"): line = line.replace(\"#\",\"\") line = line.strip() if", "r.name = os.path.basename(filename).replace(\".py\",\"\") print(\"=========================================================\") print(\"%s M | %s\" % ('0',", "= \"\" self.provider_names = [] self.related_modules = [] self.category =", "rest): #print(\"<PHASE: %s, COMMAND: %s, REST: %s>\" % (self.phase, command,", "line.rstrip() 
example.code.append(line) print(\"%s code | %s\" % (self.count, line)) def", "#print(\"ignoring line while in limbo: %s\" % line) pass elif", "def __init__(self): # things we'll figure out as we scan", "when we find the description command if command not in", "OR CONDITIONS OF ANY KIND, either express or implied. #", "governing permissions and # limitations under the License. import os", "pass elif self.phase == 'done': #print(\"ignoring line while done: %s\"", "% (command, rest)) self.handle_command(command, rest) return True #print(\"PHASE=%s\" % self.phase)", "the License is distributed on an \"AS IS\" BASIS, #", "= open(filename).read().splitlines() for line in data: if not r.handle_line(line): break", "from module mode the only state transition is into module_description", "only state transition is into module_description mode # when we", "# from module mode the only state transition is into", "start block if command == 'end_block': self.set_phase('limbo') else: raise Exception(\"invalid", "self.phase = 'module' self.count = 0 def set_phase(self, phase): self.phase", "elif not \":\" in line: # commands must contain a", "= line.replace(\"#\",\"\") line = line.strip() if line: print(\"%s mdesc |", "data: if not r.handle_line(line): break return r def load_command(self, line):", "%-20s | %s\" % (self.count, command, rest)) if command ==", "== 'providers': self.providers = [ x.strip() for x in rest.split(\",\")", "command if command == 'example': print(\"---------------------------------------------------------\") print(\"%s exmp | %s\"", "if line.startswith(\"#\"): line = line.replace(\"#\",\"\") line = line.strip() if line:", "| %s\" % ('0', r.name)) data = open(filename).read().splitlines() for line", "'description': # in description phase end block moves us into", "# could be a comment or the code example, we", "data = open(filename).read().splitlines() for line in data: if not r.handle_line(line):", "x.strip() for x in rest.split(\",\") ] elif command == 
'providers':", "self.phase == 'module': if not line.startswith(\"#\") or line.replace(\"#\",\"\").strip(): raise Exception(\"the", "things we'll figure out as we scan an example self.name", "phase if command == 'start_block': self.set_phase('example') else: raise Exception(\"invalid command:", "'see_file': self.current_example.see_files = [ x.strip() for x in rest.split(\",\")] else:", "== 'purpose': self.purpose = rest elif command == 'related': self.related_modules", "not in [ 'start_block', 'end_block' ]: # print(\"keyword: %s =>", "raise Exception(\"unknown command: %s\" % command) elif self.phase == 'example_description':", "line.split(\":\") if tokens[0].upper() != tokens[0]: # commands must be in", "+ 1 (is_command, command, rest) = self.load_command(line) print(\"%s line |", "should be all commands\") elif self.phase == 'example_description': self.handle_example_description(self.current_example, line)", "%s\" % self.phase) return True # continue def handle_command(self, command,", "'category': self.category = rest elif command == 'purpose': self.purpose =", "we'll figure out as we scan an example self.name =", "be all commands\") elif self.phase == 'example_description': self.handle_example_description(self.current_example, line) elif", "comments return (False, None, None) if \":\" in line: tokens", "sure it is a command if '#------------' in line.replace(\" \",\"\"):", "in line.replace(\" \",\"\"): return (True, 'start_block', None) if '#============' in", "None) tokens = line.split(':', 1) command = tokens[0].replace(\"#\",\"\").strip().lower() rest =", "line.replace(\"#\",\"\") line = line.strip() print(\"%s desc | %s\" % (self.count,", "== 'description': print(\"MOV!\") self.set_phase('example_description') elif command == 'see_files' or command", "%s\" % (self.count, line)) #if command == 'policy': # return", "not line.startswith(\"#\") or line.replace(\"#\",\"\").strip(): raise Exception(\"the module phase should be", "law or agreed to in writing, 
software # distributed under", "command, rest)) if self.phase == 'done': return False if self.phase", "| %s\" % (self.count, line)) example.description.append(line) def handle_example_code(self, example, line):", "# in limbo, seeing a start block moves us into", "'limbo': # in limbo, seeing a start block moves us", "hitting an end block if command == 'end_block': print(\"-------\") self.set_phase('example_code')", "%s, COMMAND: %s, REST: %s>\" % (self.phase, command, rest)) if", "line)) #if command == 'policy': # return False if is_command:", "commands must contain a colon unless they are blocks or", "things which we'll figure out as we scan the example", "desc | %s\" % (self.count, line)) example.description.append(line) def handle_example_code(self, example,", "\"\" self.description = [] self.examples = [] self.current_example = Example()", "line.startswith(\"#\"): line = line.replace(\"#\",\"\") line = line.strip() print(\"%s desc |", "scan an example self.name = \"\" self.see_files = [] self.description", "self.phase = phase print(\"---------------------------------------------------------\") print(\"%s phase | %s\" % (self.count,", "figure out as we scan an example self.name = \"\"", "= rest elif command == 'setup': self.set_phase('done') elif command ==", "get confused by colons in URLs and so on. 
print(\"REJECT:", "True # continue def handle_command(self, command, rest): #print(\"<PHASE: %s, COMMAND:", "by hitting the description command if command == 'example': print(\"---------------------------------------------------------\")", "may obtain a copy of the License at # #", "block moves us into example phase if command == 'start_block':", "elif command == 'setup': self.set_phase('done') elif command == 'description': print(\"MOV!\")", "print(\"---------------------------------------------------------\") self.set_phase('description') elif command == 'end_block': raise Exception(\"unexpected end block", "contain a colon unless they are blocks or DESCRIPTION starters", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "must be in all caps. This is done # so", "print(\"-------\") self.set_phase('example_code') else: raise Exception(\"unknown command: %s\" % command) elif", "rest) return True #print(\"PHASE=%s\" % self.phase) #print(\"LINE=%s\" % line) if", "'module' self.count = 0 def set_phase(self, phase): self.phase = phase", "if not r.handle_line(line): break return r def load_command(self, line): if", "return False if self.phase == 'module': # from module mode", "self.phase == 'done': return False else: raise Exception(\"unknown phase: %s\"", "return (True, command, rest) def handle_line(self, line): self.count = self.count", "= line.replace(\"#\",\"\") line = line.strip() print(\"%s desc | %s\" %", "hitting a start block if command == 'start_block': self.examples.append(self.current_example) self.current_example", "may not use this file except in compliance with the", "the description command if command not in ['start_block', 'end_block']: print(\"%s", "command == 'description': print(\"MOV!\") self.set_phase('example_description') elif command == 'see_files' or", "self.count = 0 def set_phase(self, phase): self.phase = phase print(\"---------------------------------------------------------\")", "in data: if not r.handle_line(line): break 
return r def load_command(self,", "Exception(\"the module phase should be all commands\") elif self.phase ==", "not r.handle_line(line): break return r def load_command(self, line): if \"DESCRIPTION\"", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "phase: %s\" % self.phase) return True # continue def handle_command(self,", "= line.rstrip() example.code.append(line) print(\"%s code | %s\" % (self.count, line))", "they are blocks or DESCRIPTION starters return (False, None, None)", "this file except in compliance with the License. # You", "line) pass elif self.phase == 'done': #print(\"ignoring line while done:", "rest)) if self.phase == 'done': return False if self.phase ==", "line = line.strip() print(\"%s desc | %s\" % (self.count, line))", "print(\"MOV!\") self.set_phase('example_description') elif command == 'see_files' or command == 'see_file':", "we don't get confused by colons in URLs and so", "not line.startswith(\"#\"): # commands must be in comments return (False,", "== 'example': if not line.startswith(\"#\") or line.replace(\"#\",\"\").strip(): raise Exception(\"the example", "into example phase if command == 'start_block': self.set_phase('example') else: raise", "or '----' in line or '====' in line: pass elif", "= 0 def set_phase(self, phase): self.phase = phase print(\"---------------------------------------------------------\") print(\"%s", "line.startswith(\"#\"): line = line.replace(\"#\",\"\") line = line.strip() if line: print(\"%s", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "%s\" % (self.count, line)) example.description.append(line) def handle_example_code(self, example, line): line", "(self.count, line)) example.description.append(line) def handle_example_code(self, example, line): line = line.rstrip()", "# # Licensed under the Apache License, Version 2.0 (the", "False else: raise Exception(\"unknown phase: %s\" % self.phase) def handle_example_description(self,", "file except 
in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "command == 'policy': # return False if is_command: #if command", "elif command == 'start_block': pass elif command == 'category': self.category", "at this point we are sure it is a command", "__init__(self): # things which we'll figure out as we scan", "= tokens[0].replace(\"#\",\"\").strip().lower() rest = tokens[1].strip() return (True, command, rest) def", "into example description phase # by hitting the description command", "command = tokens[0].replace(\"#\",\"\").strip().lower() rest = tokens[1].strip() return (True, command, rest)", "if tokens[0].upper() != tokens[0]: # commands must be in all", "throw away the leading comment line = line.replace(\"#\",\"\",1).strip() if line.startswith(\"DESCRIPTION\"):", "into example code phase # by hitting an end block", "= self.load_command(line) print(\"%s line | %s\" % (self.count, line)) #if", "by # hitting a start block if command == 'start_block':", "be comments self.handle_module_description(line) elif self.phase == 'example': if not line.startswith(\"#\")", "%s\" % command) elif self.phase == 'example': # in example", "handle_module_description(self, line): if line.startswith(\"#\"): line = line.replace(\"#\",\"\") line = line.strip()", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "'fyi': pass elif command == 'description': print(\"---------------------------------------------------------\") self.set_phase('description') elif command", "not \":\" in line: # commands must contain a colon", "set | %-20s | %s\" % (self.count, command, rest)) if", "% self.phase) def handle_example_description(self, example, line): # could be a", "'end_block']: print(\"%s set | %-20s | %s\" % (self.count, command,", "# Copyright 2018 <NAME> LLC, <<EMAIL>> # # Licensed under", "'done': return False if self.phase == 'module': # from module", "self.set_phase('limbo') else: raise Exception(\"invalid command: %s\" 
% command) elif self.phase", "[] self.description = [] self.code = [] class Record(object): def", "in rest.split(\",\") ] elif command == 'fyi': pass elif command", "print(\"%s exmp | %s\" % (self.count, rest)) print(\"---------------------------------------------------------\") self.current_example.name =", "language governing permissions and # limitations under the License. import", "example self.name = \"\" self.purpose = \"\" self.provider_names = []", "self.category = \"\" self.description = [] self.examples = [] self.current_example", "(False, None, None) if \":\" in line: tokens = line.split(\":\")", "self.name = \"\" self.see_files = [] self.description = [] self.code", "is done # so we don't get confused by colons", "#print(\"<PHASE: %s, COMMAND: %s, REST: %s>\" % (self.phase, command, rest))", "handle_example_description(self, example, line): # could be a comment or the", "command == 'example': print(\"---------------------------------------------------------\") print(\"%s exmp | %s\" % (self.count,", "rest.split(\",\") ] elif command == 'providers': self.providers = [ x.strip()", "% command) elif self.phase == 'limbo': # in limbo, seeing", "# things we'll figure out as we scan an example", "return (False, None, None) if \":\" in line: tokens =", "# return False if is_command: #if command not in [", "x in rest.split(\",\") ] elif command == 'providers': self.providers =", "block if command == 'end_block': print(\"-------\") self.set_phase('example_code') else: raise Exception(\"unknown", "elif self.phase == 'example_code': # in example code phase we", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "URLs and so on. 
print(\"REJECT: %s\" % tokens[0]) return (False,", "in [ 'start_block', 'end_block' ]: # print(\"keyword: %s => %s\"", "elif self.phase == 'limbo': #print(\"ignoring line while in limbo: %s\"", "in line or '----' in line or '====' in line:", "in comments return (False, None, None) if \":\" in line:", "self.current_example = Example() self.set_phase('example') else: raise Exception(\"unknown command: %s\" %", "'providers': self.providers = [ x.strip() for x in rest.split(\",\") ]", "elif command == 'end_block': raise Exception(\"unexpected end block without description\")", "self.phase == 'limbo': # in limbo, seeing a start block", "if self.phase == 'module': if not line.startswith(\"#\") or line.replace(\"#\",\"\").strip(): raise", "= phase print(\"---------------------------------------------------------\") print(\"%s phase | %s\" % (self.count, self.phase))", "'end_block': self.set_phase('limbo') else: raise Exception(\"invalid command: %s\" % command) elif", "self.handle_command(command, rest) return True #print(\"PHASE=%s\" % self.phase) #print(\"LINE=%s\" % line)", "or implied. # See the License for the specific language", "'start_block': self.set_phase('example') else: raise Exception(\"invalid command: %s\" % command) elif", "Example() self.set_phase('example') else: raise Exception(\"unknown command: %s\" % command) elif", "[] self.related_modules = [] self.category = \"\" self.description = []", "done: %s\" % line) pass else: raise Exception(\"unknown phase: %s\"", "the leading comment line = line.replace(\"#\",\"\",1).strip() if line.startswith(\"DESCRIPTION\"): return (True,", "rest) def handle_line(self, line): self.count = self.count + 1 (is_command,", "KIND, either express or implied. 
# See the License for", "specific language governing permissions and # limitations under the License.", "# in example phase we can only move into example", "an end block if command == 'end_block': print(\"-------\") self.set_phase('example_code') else:", "== 'example': print(\"---------------------------------------------------------\") print(\"%s exmp | %s\" % (self.count, rest))", "== 'module': # from module mode the only state transition", "find # another example start block if command == 'end_block':", "%s\" % command) elif self.phase == 'done': return False else:", "handle_line(self, line): self.count = self.count + 1 (is_command, command, rest)", "self.category = rest elif command == 'purpose': self.purpose = rest", "Exception(\"unknown command: %s\" % command) elif self.phase == 'example_code': #", "without description\") else: raise Exception(\"unknown command: %s\" % command) elif", "[] self.category = \"\" self.description = [] self.examples = []", "example, we want to keep both if line.startswith(\"#\"): line =", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "return True #print(\"PHASE=%s\" % self.phase) #print(\"LINE=%s\" % line) if self.phase", "'example_code': self.handle_example_code(self.current_example, line) elif self.phase == 'limbo': #print(\"ignoring line while", "must be in comments return (False, None, None) if \":\"", "limbo until we find # another example start block if", "line): # could be a comment or the code example,", "module_description mode # when we find the description command if", "self.phase == 'example_description': # in example description phase we can", "= os.path.basename(filename).replace(\".py\",\"\") print(\"=========================================================\") print(\"%s M | %s\" % ('0', r.name))", "line.startswith(\"DESCRIPTION\"): return (True, 'description', None) tokens = line.split(':', 1) command", "% line) if self.phase == 'module': if not line.startswith(\"#\") or", 
"command == 'description': print(\"---------------------------------------------------------\") self.set_phase('description') elif command == 'end_block': raise", "(True, 'end_block', None) # throw away the leading comment line", "tokens[0]) return (False, None, None) # at this point we", "handle_example_code(self, example, line): line = line.rstrip() example.code.append(line) print(\"%s code |", "don't get confused by colons in URLs and so on.", "elif command == 'providers': self.providers = [ x.strip() for x", "= rest elif command == 'related': self.related_modules = [ x.strip()", "(the \"License\"); # you may not use this file except", "moves us into limbo until we find # another example", "# you may not use this file except in compliance", "self.purpose = rest elif command == 'related': self.related_modules = [", "a comment or the code example, we want to keep", "and # limitations under the License. import os class Example(object):", "move into example description phase # by hitting the description", "line.startswith(\"#\") or line.replace(\"#\",\"\").strip(): raise Exception(\"the module phase should be all", "line.strip() print(\"%s desc | %s\" % (self.count, line)) example.description.append(line) def", "limbo, seeing a start block moves us into example phase", "'see_files' or command == 'see_file': self.current_example.see_files = [ x.strip() for", "only move into example description phase # by hitting the", "an example self.name = \"\" self.see_files = [] self.description =", "example phase we can only move into example description phase", "line = line.rstrip() example.code.append(line) print(\"%s code | %s\" % (self.count,", "[] self.code = [] class Record(object): def __init__(self): # things", "raise Exception(\"unknown command: %s\" % command) elif self.phase == 'description':", "Exception(\"the example phase should be all commands\") elif self.phase ==", "Example(object): def __init__(self): # things we'll figure out as we", "% self.phase) 
#print(\"LINE=%s\" % line) if self.phase == 'module': if", "rest)) if command == 'module': pass elif command == 'start_block':", "%s => %s\" % (command, rest)) self.handle_command(command, rest) return True", "% command) elif self.phase == 'example_description': # in example description", "print(\"%s M | %s\" % ('0', r.name)) data = open(filename).read().splitlines()", "rest)) self.handle_command(command, rest) return True #print(\"PHASE=%s\" % self.phase) #print(\"LINE=%s\" %", "\"\" self.purpose = \"\" self.provider_names = [] self.related_modules = []", "seeing a start block moves us into example phase if", "# # Unless required by applicable law or agreed to", "'example_description': # in example description phase we can only move", "r def load_command(self, line): if \"DESCRIPTION\" in line or '----'", "(is_command, command, rest) = self.load_command(line) print(\"%s line | %s\" %", "we scan the example self.name = \"\" self.purpose = \"\"", "= line.replace(\"#\",\"\",1).strip() if line.startswith(\"DESCRIPTION\"): return (True, 'description', None) tokens =", "'end_block' ]: # print(\"keyword: %s => %s\" % (command, rest))", "description phase we can only move into example code phase", "= tokens[1].strip() return (True, command, rest) def handle_line(self, line): self.count", "x in rest.split(\",\") ] elif command == 'fyi': pass elif", "in rest.split(\",\")] else: raise Exception(\"unknown command: %s\" % command) elif", "pass elif command == 'start_block': pass elif command == 'category':", "self.phase) def handle_example_description(self, example, line): # could be a comment", "'start_block': self.examples.append(self.current_example) self.current_example = Example() self.set_phase('example') else: raise Exception(\"unknown command:", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "to keep both if line.startswith(\"#\"): line = line.replace(\"#\",\"\") line =", "Version 2.0 (the \"License\"); # you may not use this", "if 
'#------------' in line.replace(\" \",\"\"): return (True, 'start_block', None) if", "return False else: raise Exception(\"unknown phase: %s\" % self.phase) def", "raise Exception(\"the module phase should be all commands\") elif self.phase", "self.see_files = [] self.description = [] self.code = [] class", "is a command if '#------------' in line.replace(\" \",\"\"): return (True,", "command == 'end_block': self.set_phase('limbo') else: raise Exception(\"invalid command: %s\" %", "for x in rest.split(\",\") ] elif command == 'fyi': pass", "hitting the description command if command == 'example': print(\"---------------------------------------------------------\") print(\"%s", "1 (is_command, command, rest) = self.load_command(line) print(\"%s line | %s\"", "= [] self.description = [] self.code = [] class Record(object):", "'related': self.related_modules = [ x.strip() for x in rest.split(\",\") ]", "us into example phase if command == 'start_block': self.set_phase('example') else:", "self.set_phase('example') else: raise Exception(\"invalid command: %s\" % command) elif self.phase", "command == 'see_file': self.current_example.see_files = [ x.strip() for x in", "commands\") elif self.phase == 'example_description': self.handle_example_description(self.current_example, line) elif self.phase ==", "in line: # commands must contain a colon unless they", "return True # continue def handle_command(self, command, rest): #print(\"<PHASE: %s,", "self.name = \"\" self.purpose = \"\" self.provider_names = [] self.related_modules", "= line.split(':', 1) command = tokens[0].replace(\"#\",\"\").strip().lower() rest = tokens[1].strip() return", "implied. 
# See the License for the specific language governing", "'example_description': self.handle_example_description(self.current_example, line) elif self.phase == 'example_code': self.handle_example_code(self.current_example, line) elif", "only move back into example phase by # hitting a", "under the Apache License, Version 2.0 (the \"License\"); # you", "example start block if command == 'end_block': self.set_phase('limbo') else: raise", "as we scan an example self.name = \"\" self.see_files =", "== 'example_description': self.handle_example_description(self.current_example, line) elif self.phase == 'example_code': self.handle_example_code(self.current_example, line)", "so we don't get confused by colons in URLs and", "== 'policy': # return False if is_command: #if command not", "command == 'setup': self.set_phase('done') elif command == 'description': print(\"MOV!\") self.set_phase('example_description')", "not in ['start_block', 'end_block']: print(\"%s set | %-20s | %s\"", "a start block moves us into example phase if command", "example code phase # by hitting an end block if", "self.purpose = \"\" self.provider_names = [] self.related_modules = [] self.category", "'start_block', None) if '#============' in line.replace(\" \",\"\"): return (True, 'end_block',", "a start block if command == 'start_block': self.examples.append(self.current_example) self.current_example =", "'module': # from module mode the only state transition is", "by applicable law or agreed to in writing, software #", "= \"\" self.description = [] self.examples = [] self.current_example =", "can only move into example description phase # by hitting", "if line.startswith(\"DESCRIPTION\"): return (True, 'description', None) tokens = line.split(':', 1)", "# another example start block if command == 'end_block': self.set_phase('limbo')", "elif self.phase == 'limbo': # in limbo, seeing a start", "tokens[0]: # commands must be in all caps. 
This is", "%s\" % (self.count, rest)) print(\"---------------------------------------------------------\") self.current_example.name = rest elif command", "back into example phase by # hitting a start block", "#if command not in [ 'start_block', 'end_block' ]: # print(\"keyword:", "'policy': # return False if is_command: #if command not in", "another example start block if command == 'end_block': self.set_phase('limbo') else:", "self.description = [] self.examples = [] self.current_example = Example() self.phase", "mode # when we find the description command if command", "if not line.startswith(\"#\") or line.replace(\"#\",\"\").strip(): raise Exception(\"the module phase should", "description\") else: raise Exception(\"unknown command: %s\" % command) elif self.phase", "be in all caps. This is done # so we", "%s>\" % (self.phase, command, rest)) if self.phase == 'done': return", "by colons in URLs and so on. print(\"REJECT: %s\" %", "while in limbo: %s\" % line) pass elif self.phase ==", "'example': # in example phase we can only move into", "print(\"---------------------------------------------------------\") print(\"%s phase | %s\" % (self.count, self.phase)) print(\"---------------------------------------------------------\") @classmethod", "are sure it is a command if '#------------' in line.replace(\"", "(self.count, line)) #if command == 'policy': # return False if", "is into module_description mode # when we find the description", "phase should be all commands\") elif self.phase == 'description': #", "%s\" % command) elif self.phase == 'limbo': # in limbo,", "return False if is_command: #if command not in [ 'start_block',", "the description command if command == 'example': print(\"---------------------------------------------------------\") print(\"%s exmp", "tokens = line.split(\":\") if tokens[0].upper() != tokens[0]: # commands must", "from_file(cls, filename): r = cls() r.name = os.path.basename(filename).replace(\".py\",\"\") 
print(\"=========================================================\") print(\"%s", "% (self.count, command, rest)) if command == 'module': pass elif", "%s\" % self.phase) def handle_example_description(self, example, line): # could be", "os class Example(object): def __init__(self): # things we'll figure out", "None) if \":\" in line: tokens = line.split(\":\") if tokens[0].upper()", "filename): r = cls() r.name = os.path.basename(filename).replace(\".py\",\"\") print(\"=========================================================\") print(\"%s M", "we can only move back into example phase by #", "= \"\" self.purpose = \"\" self.provider_names = [] self.related_modules =", "self.set_phase('example_description') elif command == 'see_files' or command == 'see_file': self.current_example.see_files", "%s\" % (command, rest)) self.handle_command(command, rest) return True #print(\"PHASE=%s\" %", "== 'related': self.related_modules = [ x.strip() for x in rest.split(\",\")", "%s\" % (self.count, line)) def handle_module_description(self, line): if line.startswith(\"#\"): line", "import os class Example(object): def __init__(self): # things we'll figure", "line): self.count = self.count + 1 (is_command, command, rest) =", "self.related_modules = [ x.strip() for x in rest.split(\",\") ] elif", "or the code example, we want to keep both if", "command == 'start_block': pass elif command == 'category': self.category =", "could be a comment or the code example, we want", "example description phase we can only move into example code", "if command == 'end_block': print(\"-------\") self.set_phase('example_code') else: raise Exception(\"unknown command:", "permissions and # limitations under the License. 
import os class", "[ x.strip() for x in rest.split(\",\") ] elif command ==", "% command) elif self.phase == 'done': return False else: raise", "'example_code': # in example code phase we can only move", "'done': #print(\"ignoring line while done: %s\" % line) pass else:", "if '#============' in line.replace(\" \",\"\"): return (True, 'end_block', None) #", "[] class Record(object): def __init__(self): # things which we'll figure", "description phase # by hitting the description command if command", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "Unless required by applicable law or agreed to in writing,", "elif self.phase == 'example': if not line.startswith(\"#\") or line.replace(\"#\",\"\").strip(): raise", "self.phase == 'done': return False if self.phase == 'module': #", "self.set_phase('description') elif command == 'end_block': raise Exception(\"unexpected end block without", "command: %s\" % command) elif self.phase == 'description': # in", "on. print(\"REJECT: %s\" % tokens[0]) return (False, None, None) #", "we can only move into example description phase # by", "set_phase(self, phase): self.phase = phase print(\"---------------------------------------------------------\") print(\"%s phase | %s\"", "find the description command if command not in ['start_block', 'end_block']:", "description command if command not in ['start_block', 'end_block']: print(\"%s set", "command) elif self.phase == 'example_code': # in example code phase", "== 'end_block': raise Exception(\"unexpected end block without description\") else: raise", "x.strip() for x in rest.split(\",\") ] elif command == 'fyi':", "the specific language governing permissions and # limitations under the", "as we scan the example self.name = \"\" self.purpose =", "phase | %s\" % (self.count, self.phase)) print(\"---------------------------------------------------------\") @classmethod def from_file(cls,", "== 'description': 
print(\"---------------------------------------------------------\") self.set_phase('description') elif command == 'end_block': raise Exception(\"unexpected", "License. import os class Example(object): def __init__(self): # things we'll", "DESCRIPTION starters return (False, None, None) if not line.startswith(\"#\"): #", "applicable law or agreed to in writing, software # distributed", "<NAME> LLC, <<EMAIL>> # # Licensed under the Apache License,", "@classmethod def from_file(cls, filename): r = cls() r.name = os.path.basename(filename).replace(\".py\",\"\")", "line.replace(\"#\",\"\",1).strip() if line.startswith(\"DESCRIPTION\"): return (True, 'description', None) tokens = line.split(':',", "== 'see_file': self.current_example.see_files = [ x.strip() for x in rest.split(\",\")]", "#if command == 'policy': # return False if is_command: #if", "command) elif self.phase == 'example_description': # in example description phase", "elif command == 'see_files' or command == 'see_file': self.current_example.see_files =", "phase we can only move back into example phase by", "= line.strip() if line: print(\"%s mdesc | %s\" % (self.count,", "line while done: %s\" % line) pass else: raise Exception(\"unknown", "in writing, software # distributed under the License is distributed", "% (self.count, self.phase)) print(\"---------------------------------------------------------\") @classmethod def from_file(cls, filename): r =", "return r def load_command(self, line): if \"DESCRIPTION\" in line or", "] elif command == 'providers': self.providers = [ x.strip() for", "command if command not in ['start_block', 'end_block']: print(\"%s set |", "M | %s\" % ('0', r.name)) data = open(filename).read().splitlines() for", "command == 'start_block': self.set_phase('example') else: raise Exception(\"invalid command: %s\" %", "elif self.phase == 'done': return False else: raise Exception(\"unknown phase:", "which we'll figure out as we scan the example self.name", "line.startswith(\"#\"): # 
commands must be in comments return (False, None,", "None) if not line.startswith(\"#\"): # commands must be in comments", "'end_block': print(\"-------\") self.set_phase('example_code') else: raise Exception(\"unknown command: %s\" % command)", "(self.count, command, rest)) if command == 'module': pass elif command", "we find # another example start block if command ==", "self.description = [] self.code = [] class Record(object): def __init__(self):", "== 'done': return False if self.phase == 'module': # from", "or command == 'see_file': self.current_example.see_files = [ x.strip() for x", "command if '#------------' in line.replace(\" \",\"\"): return (True, 'start_block', None)", "% (self.count, line)) example.description.append(line) def handle_example_code(self, example, line): line =", "we find the description command if command not in ['start_block',", "== 'fyi': pass elif command == 'description': print(\"---------------------------------------------------------\") self.set_phase('description') elif", "line) elif self.phase == 'limbo': #print(\"ignoring line while in limbo:", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "tokens[0].upper() != tokens[0]: # commands must be in all caps.", "None) # throw away the leading comment line = line.replace(\"#\",\"\",1).strip()", "block without description\") else: raise Exception(\"unknown command: %s\" % command)", "start block if command == 'start_block': self.examples.append(self.current_example) self.current_example = Example()", "License, Version 2.0 (the \"License\"); # you may not use", "self.count = self.count + 1 (is_command, command, rest) = self.load_command(line)", "%s\" % command) elif self.phase == 'description': # in description", "os.path.basename(filename).replace(\".py\",\"\") print(\"=========================================================\") print(\"%s M | %s\" % ('0', r.name)) data", "# You may obtain a copy of the License at", "x in rest.split(\",\")] else: raise 
Exception(\"unknown command: %s\" % command)", "line) pass else: raise Exception(\"unknown phase: %s\" % self.phase) return", "code phase we can only move back into example phase", "block moves us into limbo until we find # another", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "print(\"%s phase | %s\" % (self.count, self.phase)) print(\"---------------------------------------------------------\") @classmethod def", "r.handle_line(line): break return r def load_command(self, line): if \"DESCRIPTION\" in", "# commands must be in comments return (False, None, None)", "'module': pass elif command == 'start_block': pass elif command ==", "'example': if not line.startswith(\"#\") or line.replace(\"#\",\"\").strip(): raise Exception(\"the example phase", "while done: %s\" % line) pass else: raise Exception(\"unknown phase:", "def handle_line(self, line): self.count = self.count + 1 (is_command, command,", "line.replace(\"#\",\"\").strip(): raise Exception(\"the module phase should be all commands\") elif", "= rest elif command == 'purpose': self.purpose = rest elif", "self.set_phase('example') else: raise Exception(\"unknown command: %s\" % command) elif self.phase", "Copyright 2018 <NAME> LLC, <<EMAIL>> # # Licensed under the", "command, rest)) if command == 'module': pass elif command ==", "in limbo, seeing a start block moves us into example", "us into limbo until we find # another example start", "example, line): line = line.rstrip() example.code.append(line) print(\"%s code | %s\"", "# commands must contain a colon unless they are blocks", "we'll figure out as we scan the example self.name =", "if \":\" in line: tokens = line.split(\":\") if tokens[0].upper() !=", "only move into example code phase # by hitting an", "the License for the specific language governing permissions and #", "= [] self.related_modules = [] self.category = \"\" self.description =", "def handle_module_description(self, line): if line.startswith(\"#\"): line = 
line.replace(\"#\",\"\") line =", "line or '====' in line: pass elif not \":\" in", "elif command == 'category': self.category = rest elif command ==", "Apache License, Version 2.0 (the \"License\"); # you may not", "example phase by # hitting a start block if command", "all caps. This is done # so we don't get", "\":\" in line: # commands must contain a colon unless", "either express or implied. # See the License for the", "(self.phase, command, rest)) if self.phase == 'done': return False if", "raise Exception(\"invalid command: %s\" % command) elif self.phase == 'limbo':", "rest elif command == 'setup': self.set_phase('done') elif command == 'description':", "# at this point we are sure it is a", "elif self.phase == 'example': # in example phase we can", "can only move back into example phase by # hitting", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "command: %s\" % command) elif self.phase == 'done': return False", "command) elif self.phase == 'description': # in description phase end", "the code example, we want to keep both if line.startswith(\"#\"):", "if not line.startswith(\"#\"): # commands must be in comments return", "phase end block moves us into limbo until we find", "2018 <NAME> LLC, <<EMAIL>> # # Licensed under the Apache", "= [] self.current_example = Example() self.phase = 'module' self.count =", "# things which we'll figure out as we scan the", "elif self.phase == 'example_code': self.handle_example_code(self.current_example, line) elif self.phase == 'limbo':", "%s\" % (self.count, command, rest)) if command == 'module': pass", "line): if \"DESCRIPTION\" in line or '----' in line or", "in rest.split(\",\") ] elif command == 'providers': self.providers = [", "command) elif self.phase == 'limbo': # in limbo, seeing a", "'module': if not line.startswith(\"#\") or line.replace(\"#\",\"\").strip(): raise Exception(\"the module phase", "command not in ['start_block', 'end_block']: print(\"%s set | %-20s |", 
"pass elif command == 'description': print(\"---------------------------------------------------------\") self.set_phase('description') elif command ==", "example self.name = \"\" self.see_files = [] self.description = []", "[ x.strip() for x in rest.split(\",\")] else: raise Exception(\"unknown command:", "#print(\"LINE=%s\" % line) if self.phase == 'module': if not line.startswith(\"#\")", "phase # by hitting an end block if command ==", "confused by colons in URLs and so on. print(\"REJECT: %s\"", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "False if self.phase == 'module': # from module mode the", "line: tokens = line.split(\":\") if tokens[0].upper() != tokens[0]: # commands", "rest elif command == 'related': self.related_modules = [ x.strip() for", "self.handle_example_code(self.current_example, line) elif self.phase == 'limbo': #print(\"ignoring line while in", "command == 'related': self.related_modules = [ x.strip() for x in", "in limbo: %s\" % line) pass elif self.phase == 'done':", "line: pass elif not \":\" in line: # commands must", "# so we don't get confused by colons in URLs", "tokens[1].strip() return (True, command, rest) def handle_line(self, line): self.count =", "line.split(':', 1) command = tokens[0].replace(\"#\",\"\").strip().lower() rest = tokens[1].strip() return (True,", "#print(\"PHASE=%s\" % self.phase) #print(\"LINE=%s\" % line) if self.phase == 'module':", "=> %s\" % (command, rest)) self.handle_command(command, rest) return True #print(\"PHASE=%s\"", "limitations under the License. 
import os class Example(object): def __init__(self):", "else: raise Exception(\"unknown command: %s\" % command) elif self.phase ==", "command) elif self.phase == 'example': # in example phase we", "# by hitting an end block if command == 'end_block':", "move back into example phase by # hitting a start", "%s\" % (self.count, self.phase)) print(\"---------------------------------------------------------\") @classmethod def from_file(cls, filename): r", "print(\"%s line | %s\" % (self.count, line)) #if command ==", "% (self.phase, command, rest)) if self.phase == 'done': return False", "\",\"\"): return (True, 'end_block', None) # throw away the leading", "for x in rest.split(\",\")] else: raise Exception(\"unknown command: %s\" %", "This is done # so we don't get confused by", "in all caps. This is done # so we don't", "line.replace(\"#\",\"\").strip(): raise Exception(\"the example phase should be all commands\") elif", "must contain a colon unless they are blocks or DESCRIPTION", "\"License\"); # you may not use this file except in", "in line or '====' in line: pass elif not \":\"", "return (False, None, None) if not line.startswith(\"#\"): # commands must", "def handle_example_description(self, example, line): # could be a comment or", "= line.strip() print(\"%s desc | %s\" % (self.count, line)) example.description.append(line)", "both if line.startswith(\"#\"): line = line.replace(\"#\",\"\") line = line.strip() print(\"%s", "state transition is into module_description mode # when we find", "'done': return False else: raise Exception(\"unknown phase: %s\" % self.phase)", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "line) elif self.phase == 'example_code': self.handle_example_code(self.current_example, line) elif self.phase ==", "by hitting an end block if command == 'end_block': print(\"-------\")", "out as we scan an example self.name = \"\" self.see_files", "Record(object): def __init__(self): # things which we'll figure out as", "(False, 
None, None) if not line.startswith(\"#\"): # commands must be", "phase should be all commands\") elif self.phase == 'example_description': self.handle_example_description(self.current_example,", "command: %s\" % command) elif self.phase == 'example_description': # in", "Exception(\"invalid command: %s\" % command) elif self.phase == 'example': #", "self.set_phase('example_code') else: raise Exception(\"unknown command: %s\" % command) elif self.phase", "mode the only state transition is into module_description mode #", "= 'module' self.count = 0 def set_phase(self, phase): self.phase =", "# distributed under the License is distributed on an \"AS", "'start_block', 'end_block' ]: # print(\"keyword: %s => %s\" % (command,", "line = line.strip() if line: print(\"%s mdesc | %s\" %", "# Unless required by applicable law or agreed to in", "we can only move into example code phase # by", "phase by # hitting a start block if command ==", "open(filename).read().splitlines() for line in data: if not r.handle_line(line): break return", "% line) pass else: raise Exception(\"unknown phase: %s\" % self.phase)", "or DESCRIPTION starters return (False, None, None) if not line.startswith(\"#\"):", "[] self.current_example = Example() self.phase = 'module' self.count = 0", "== 'description': # in description phase end block moves us", "%s\" % line) pass elif self.phase == 'done': #print(\"ignoring line", "== 'end_block': print(\"-------\") self.set_phase('example_code') else: raise Exception(\"unknown command: %s\" %", "line)) example.description.append(line) def handle_example_code(self, example, line): line = line.rstrip() example.code.append(line)", "caps. 
This is done # so we don't get confused", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "= [ x.strip() for x in rest.split(\",\")] else: raise Exception(\"unknown", "# by hitting the description command if command == 'example':", "self.provider_names = [] self.related_modules = [] self.category = \"\" self.description", "You may obtain a copy of the License at #", "\"\" self.see_files = [] self.description = [] self.code = []", "tokens[0].replace(\"#\",\"\").strip().lower() rest = tokens[1].strip() return (True, command, rest) def handle_line(self,", "comment or the code example, we want to keep both", "if line: print(\"%s mdesc | %s\" % (self.count, line)) self.description.append(line)", "command == 'fyi': pass elif command == 'description': print(\"---------------------------------------------------------\") self.set_phase('description')", "== 'start_block': pass elif command == 'category': self.category = rest", "= \"\" self.see_files = [] self.description = [] self.code =", "comment line = line.replace(\"#\",\"\",1).strip() if line.startswith(\"DESCRIPTION\"): return (True, 'description', None)", "== 'see_files' or command == 'see_file': self.current_example.see_files = [ x.strip()", "Exception(\"unknown phase: %s\" % self.phase) return True # continue def", "phase: %s\" % self.phase) def handle_example_description(self, example, line): # could", "in line: pass elif not \":\" in line: # commands", "in example phase we can only move into example description", "the Apache License, Version 2.0 (the \"License\"); # you may", "line | %s\" % (self.count, line)) #if command == 'policy':", "else: raise Exception(\"unknown phase: %s\" % self.phase) return True #", "example.code.append(line) print(\"%s code | %s\" % (self.count, line)) def handle_module_description(self,", "elif command == 'related': self.related_modules = [ x.strip() for x", "be all commands\") elif self.phase == 'description': # module description", "handle_command(self, command, rest): 
#print(\"<PHASE: %s, COMMAND: %s, REST: %s>\" %", "Exception(\"invalid command: %s\" % command) elif self.phase == 'limbo': #", "return (True, 'end_block', None) # throw away the leading comment", "# in example code phase we can only move back", "must be comments self.handle_module_description(line) elif self.phase == 'example': if not", "print(\"%s set | %-20s | %s\" % (self.count, command, rest))" ]
[ "OR =31 PRINT =32 RETURN = 33 SUPER = 34", "22 #keywords AND = 23 CLASS = 24 ELSE =", "35 TRUE = 36 VAR = 37 WHILE = 38", "= 11 #one or two character tokens BANG = 12", "= 5 DOT = 6 MINUS = 7 PLUS =", "FUN = 27 FOR = 28 IF = 29 NIL", "#one or two character tokens BANG = 12 BANG_EQUAL =", "LEFT_BRACE = 3 RIGHT_BRACE = 4 COMMA = 5 DOT", "= 20 STRING = 21 NUMBER = 22 #keywords AND", "21 NUMBER = 22 #keywords AND = 23 CLASS =", "IF = 29 NIL =30 OR =31 PRINT =32 RETURN", "FALSE = 26 FUN = 27 FOR = 28 IF", "3 RIGHT_BRACE = 4 COMMA = 5 DOT = 6", "TRUE = 36 VAR = 37 WHILE = 38 EOF=", "= 16 GREATER_EQUAL = 17 LESS = 18 LESS_EQUAL =", "NIL =30 OR =31 PRINT =32 RETURN = 33 SUPER", "13 EQUAL = 14 EQUAL_EQUAL = 15 GREATER = 16", "14 EQUAL_EQUAL = 15 GREATER = 16 GREATER_EQUAL = 17", "PLUS = 8 SEMICOLON = 9 SLASH = 10 STAR", "STAR = 11 #one or two character tokens BANG =", "Tokens LEFT_PAREN =1 RIGHT_PAREN =2 LEFT_BRACE = 3 RIGHT_BRACE =", "=1 RIGHT_PAREN =2 LEFT_BRACE = 3 RIGHT_BRACE = 4 COMMA", "7 PLUS = 8 SEMICOLON = 9 SLASH = 10", "character tokens BANG = 12 BANG_EQUAL = 13 EQUAL =", "COMMA = 5 DOT = 6 MINUS = 7 PLUS", "= 9 SLASH = 10 STAR = 11 #one or", "= 23 CLASS = 24 ELSE = 25 FALSE =", "16 GREATER_EQUAL = 17 LESS = 18 LESS_EQUAL = 19", "= 13 EQUAL = 14 EQUAL_EQUAL = 15 GREATER =", "= 34 THIS = 35 TRUE = 36 VAR =", "STRING = 21 NUMBER = 22 #keywords AND = 23", "from enum import Enum class T(Enum): #single character Tokens LEFT_PAREN", "SLASH = 10 STAR = 11 #one or two character", "tokens BANG = 12 BANG_EQUAL = 13 EQUAL = 14", "GREATER = 16 GREATER_EQUAL = 17 LESS = 18 LESS_EQUAL", "= 14 EQUAL_EQUAL = 15 GREATER = 16 GREATER_EQUAL =", "#keywords AND = 23 CLASS = 24 ELSE = 25", "= 28 IF = 29 NIL =30 OR =31 PRINT", "enum import Enum class T(Enum): #single character Tokens LEFT_PAREN =1", "= 27 FOR = 28 IF = 29 NIL =30", "NUMBER = 22 #keywords AND = 23 CLASS = 24", "EQUAL = 14 EQUAL_EQUAL = 15 GREATER = 16 GREATER_EQUAL", "= 12 BANG_EQUAL = 13 EQUAL = 14 EQUAL_EQUAL =", 
"15 GREATER = 16 GREATER_EQUAL = 17 LESS = 18", "5 DOT = 6 MINUS = 7 PLUS = 8", "THIS = 35 TRUE = 36 VAR = 37 WHILE", "character Tokens LEFT_PAREN =1 RIGHT_PAREN =2 LEFT_BRACE = 3 RIGHT_BRACE", "RETURN = 33 SUPER = 34 THIS = 35 TRUE", "EQUAL_EQUAL = 15 GREATER = 16 GREATER_EQUAL = 17 LESS", "= 35 TRUE = 36 VAR = 37 WHILE =", "= 10 STAR = 11 #one or two character tokens", "27 FOR = 28 IF = 29 NIL =30 OR", "= 22 #keywords AND = 23 CLASS = 24 ELSE", "LEFT_PAREN =1 RIGHT_PAREN =2 LEFT_BRACE = 3 RIGHT_BRACE = 4", "= 29 NIL =30 OR =31 PRINT =32 RETURN =", "= 3 RIGHT_BRACE = 4 COMMA = 5 DOT =", "12 BANG_EQUAL = 13 EQUAL = 14 EQUAL_EQUAL = 15", "LESS = 18 LESS_EQUAL = 19 #Literals IDENTIFIER = 20", "= 19 #Literals IDENTIFIER = 20 STRING = 21 NUMBER", "= 6 MINUS = 7 PLUS = 8 SEMICOLON =", "= 21 NUMBER = 22 #keywords AND = 23 CLASS", "10 STAR = 11 #one or two character tokens BANG", "= 4 COMMA = 5 DOT = 6 MINUS =", "SEMICOLON = 9 SLASH = 10 STAR = 11 #one", "= 24 ELSE = 25 FALSE = 26 FUN =", "IDENTIFIER = 20 STRING = 21 NUMBER = 22 #keywords", "=31 PRINT =32 RETURN = 33 SUPER = 34 THIS", "=30 OR =31 PRINT =32 RETURN = 33 SUPER =", "PRINT =32 RETURN = 33 SUPER = 34 THIS =", "two character tokens BANG = 12 BANG_EQUAL = 13 EQUAL", "= 36 VAR = 37 WHILE = 38 EOF= 39", "Enum class T(Enum): #single character Tokens LEFT_PAREN =1 RIGHT_PAREN =2", "18 LESS_EQUAL = 19 #Literals IDENTIFIER = 20 STRING =", "25 FALSE = 26 FUN = 27 FOR = 28", "33 SUPER = 34 THIS = 35 TRUE = 36", "20 STRING = 21 NUMBER = 22 #keywords AND =", "19 #Literals IDENTIFIER = 20 STRING = 21 NUMBER =", "= 26 FUN = 27 FOR = 28 IF =", "8 SEMICOLON = 9 SLASH = 10 STAR = 11", "DOT = 6 MINUS = 7 PLUS = 8 SEMICOLON", "24 ELSE = 25 FALSE = 26 FUN = 27", "9 SLASH = 10 STAR = 11 #one or two", "= 33 SUPER = 34 THIS = 35 TRUE =", "class T(Enum): #single character Tokens LEFT_PAREN =1 RIGHT_PAREN =2 LEFT_BRACE", "GREATER_EQUAL = 17 LESS = 18 LESS_EQUAL = 19 #Literals", "ELSE = 25 FALSE = 26 FUN = 27 FOR", "28 IF = 29 NIL =30 
OR =31 PRINT =32", "11 #one or two character tokens BANG = 12 BANG_EQUAL", "FOR = 28 IF = 29 NIL =30 OR =31", "= 17 LESS = 18 LESS_EQUAL = 19 #Literals IDENTIFIER", "= 15 GREATER = 16 GREATER_EQUAL = 17 LESS =", "BANG = 12 BANG_EQUAL = 13 EQUAL = 14 EQUAL_EQUAL", "23 CLASS = 24 ELSE = 25 FALSE = 26", "BANG_EQUAL = 13 EQUAL = 14 EQUAL_EQUAL = 15 GREATER", "CLASS = 24 ELSE = 25 FALSE = 26 FUN", "MINUS = 7 PLUS = 8 SEMICOLON = 9 SLASH", "SUPER = 34 THIS = 35 TRUE = 36 VAR", "= 25 FALSE = 26 FUN = 27 FOR =", "26 FUN = 27 FOR = 28 IF = 29", "34 THIS = 35 TRUE = 36 VAR = 37", "import Enum class T(Enum): #single character Tokens LEFT_PAREN =1 RIGHT_PAREN", "17 LESS = 18 LESS_EQUAL = 19 #Literals IDENTIFIER =", "AND = 23 CLASS = 24 ELSE = 25 FALSE", "4 COMMA = 5 DOT = 6 MINUS = 7", "= 18 LESS_EQUAL = 19 #Literals IDENTIFIER = 20 STRING", "6 MINUS = 7 PLUS = 8 SEMICOLON = 9", "#single character Tokens LEFT_PAREN =1 RIGHT_PAREN =2 LEFT_BRACE = 3", "RIGHT_BRACE = 4 COMMA = 5 DOT = 6 MINUS", "#Literals IDENTIFIER = 20 STRING = 21 NUMBER = 22", "RIGHT_PAREN =2 LEFT_BRACE = 3 RIGHT_BRACE = 4 COMMA =", "=32 RETURN = 33 SUPER = 34 THIS = 35", "= 8 SEMICOLON = 9 SLASH = 10 STAR =", "=2 LEFT_BRACE = 3 RIGHT_BRACE = 4 COMMA = 5", "or two character tokens BANG = 12 BANG_EQUAL = 13", "29 NIL =30 OR =31 PRINT =32 RETURN = 33", "= 7 PLUS = 8 SEMICOLON = 9 SLASH =", "LESS_EQUAL = 19 #Literals IDENTIFIER = 20 STRING = 21", "T(Enum): #single character Tokens LEFT_PAREN =1 RIGHT_PAREN =2 LEFT_BRACE =" ]
[ "def plugin_name(): return \"add-framework\" @staticmethod def brief_description(): return MultiLanguage.get_string('FRAMEWORK_ADD_BRIEF') #", "return \"add-framework\" @staticmethod def brief_description(): return MultiLanguage.get_string('FRAMEWORK_ADD_BRIEF') # parse arguments", "argparse import ArgumentParser parser = ArgumentParser(prog=\"cocos %s\" % self.__class__.plugin_name(), description=self.__class__.brief_description())", "import MultiLanguage from package.helper import ProjectHelper class FrameworkAdd(cocos.CCPlugin): @staticmethod def", "ArgumentParser(prog=\"cocos %s\" % self.__class__.plugin_name(), description=self.__class__.brief_description()) parser.add_argument(\"name\", metavar=\"NAME\", help=MultiLanguage.get_string('FRAMEWORK_ADD_ARG_NAME')) return parser.parse_args(argv)", "plugin_name(): return \"add-framework\" @staticmethod def brief_description(): return MultiLanguage.get_string('FRAMEWORK_ADD_BRIEF') # parse", "@staticmethod def brief_description(): return MultiLanguage.get_string('FRAMEWORK_ADD_BRIEF') # parse arguments def parse_args(self,", "def parse_args(self, argv): from argparse import ArgumentParser parser = ArgumentParser(prog=\"cocos", "argv): args = self.parse_args(argv) name = args.name project = ProjectHelper.get_current_project()", "parse arguments def parse_args(self, argv): from argparse import ArgumentParser parser", "def brief_description(): return MultiLanguage.get_string('FRAMEWORK_ADD_BRIEF') # parse arguments def parse_args(self, argv):", "parser.add_argument(\"name\", metavar=\"NAME\", help=MultiLanguage.get_string('FRAMEWORK_ADD_ARG_NAME')) return parser.parse_args(argv) def run(self, argv): args =", "return parser.parse_args(argv) def run(self, argv): args = self.parse_args(argv) name =", "\"add-framework\" @staticmethod def brief_description(): return MultiLanguage.get_string('FRAMEWORK_ADD_BRIEF') # parse arguments def", "MultiLanguage from package.helper import ProjectHelper class 
FrameworkAdd(cocos.CCPlugin): @staticmethod def plugin_name():", "args = self.parse_args(argv) name = args.name project = ProjectHelper.get_current_project() ProjectHelper.add_framework(project,", "import ArgumentParser parser = ArgumentParser(prog=\"cocos %s\" % self.__class__.plugin_name(), description=self.__class__.brief_description()) parser.add_argument(\"name\",", "MultiLanguage.get_string('FRAMEWORK_ADD_BRIEF') # parse arguments def parse_args(self, argv): from argparse import", "description=self.__class__.brief_description()) parser.add_argument(\"name\", metavar=\"NAME\", help=MultiLanguage.get_string('FRAMEWORK_ADD_ARG_NAME')) return parser.parse_args(argv) def run(self, argv): args", "import cocos from MultiLanguage import MultiLanguage from package.helper import ProjectHelper", "metavar=\"NAME\", help=MultiLanguage.get_string('FRAMEWORK_ADD_ARG_NAME')) return parser.parse_args(argv) def run(self, argv): args = self.parse_args(argv)", "class FrameworkAdd(cocos.CCPlugin): @staticmethod def plugin_name(): return \"add-framework\" @staticmethod def brief_description():", "return MultiLanguage.get_string('FRAMEWORK_ADD_BRIEF') # parse arguments def parse_args(self, argv): from argparse", "from MultiLanguage import MultiLanguage from package.helper import ProjectHelper class FrameworkAdd(cocos.CCPlugin):", "brief_description(): return MultiLanguage.get_string('FRAMEWORK_ADD_BRIEF') # parse arguments def parse_args(self, argv): from", "% self.__class__.plugin_name(), description=self.__class__.brief_description()) parser.add_argument(\"name\", metavar=\"NAME\", help=MultiLanguage.get_string('FRAMEWORK_ADD_ARG_NAME')) return parser.parse_args(argv) def run(self,", "argv): from argparse import ArgumentParser parser = ArgumentParser(prog=\"cocos %s\" %", "import ProjectHelper class FrameworkAdd(cocos.CCPlugin): @staticmethod def plugin_name(): return \"add-framework\" @staticmethod", "ArgumentParser parser = ArgumentParser(prog=\"cocos %s\" % 
self.__class__.plugin_name(), description=self.__class__.brief_description()) parser.add_argument(\"name\", metavar=\"NAME\",", "= self.parse_args(argv) name = args.name project = ProjectHelper.get_current_project() ProjectHelper.add_framework(project, name)", "ProjectHelper class FrameworkAdd(cocos.CCPlugin): @staticmethod def plugin_name(): return \"add-framework\" @staticmethod def", "run(self, argv): args = self.parse_args(argv) name = args.name project =", "help=MultiLanguage.get_string('FRAMEWORK_ADD_ARG_NAME')) return parser.parse_args(argv) def run(self, argv): args = self.parse_args(argv) name", "from argparse import ArgumentParser parser = ArgumentParser(prog=\"cocos %s\" % self.__class__.plugin_name(),", "parse_args(self, argv): from argparse import ArgumentParser parser = ArgumentParser(prog=\"cocos %s\"", "def run(self, argv): args = self.parse_args(argv) name = args.name project", "parser = ArgumentParser(prog=\"cocos %s\" % self.__class__.plugin_name(), description=self.__class__.brief_description()) parser.add_argument(\"name\", metavar=\"NAME\", help=MultiLanguage.get_string('FRAMEWORK_ADD_ARG_NAME'))", "# parse arguments def parse_args(self, argv): from argparse import ArgumentParser", "self.__class__.plugin_name(), description=self.__class__.brief_description()) parser.add_argument(\"name\", metavar=\"NAME\", help=MultiLanguage.get_string('FRAMEWORK_ADD_ARG_NAME')) return parser.parse_args(argv) def run(self, argv):", "parser.parse_args(argv) def run(self, argv): args = self.parse_args(argv) name = args.name", "arguments def parse_args(self, argv): from argparse import ArgumentParser parser =", "from package.helper import ProjectHelper class FrameworkAdd(cocos.CCPlugin): @staticmethod def plugin_name(): return", "cocos from MultiLanguage import MultiLanguage from package.helper import ProjectHelper class", "FrameworkAdd(cocos.CCPlugin): @staticmethod def plugin_name(): return \"add-framework\" @staticmethod def brief_description(): return", 
"@staticmethod def plugin_name(): return \"add-framework\" @staticmethod def brief_description(): return MultiLanguage.get_string('FRAMEWORK_ADD_BRIEF')", "MultiLanguage import MultiLanguage from package.helper import ProjectHelper class FrameworkAdd(cocos.CCPlugin): @staticmethod", "%s\" % self.__class__.plugin_name(), description=self.__class__.brief_description()) parser.add_argument(\"name\", metavar=\"NAME\", help=MultiLanguage.get_string('FRAMEWORK_ADD_ARG_NAME')) return parser.parse_args(argv) def", "package.helper import ProjectHelper class FrameworkAdd(cocos.CCPlugin): @staticmethod def plugin_name(): return \"add-framework\"", "<reponame>dios-game/dios-cocos<gh_stars>1-10 import cocos from MultiLanguage import MultiLanguage from package.helper import", "= ArgumentParser(prog=\"cocos %s\" % self.__class__.plugin_name(), description=self.__class__.brief_description()) parser.add_argument(\"name\", metavar=\"NAME\", help=MultiLanguage.get_string('FRAMEWORK_ADD_ARG_NAME')) return" ]
[ "vmin=None, vmax=None, vcenter=None, clip=False): self.vcenter = vcenter colors.Normalize.__init__(self, vmin, vmax,", "clip) def __call__(self, value, clip=None): # I'm ignoring masked values", "value, clip=None): # I'm ignoring masked values and all kinds", "to make a # simple example... x, y = [self.vmin,", "values and all kinds of edge cases to make a", "x, y = [self.vmin, self.vcenter, self.vmax], [self.vmin, self.vcenter, self.vmax] return", "matplotlib import colors import numpy as np class SaveOutput: def", "class SaveOutput: def __init__(self): self.outputs = [] def __call__(self, module,", "# I'm ignoring masked values and all kinds of edge", "clear(self): self.outputs = [] class MidpointNormalize(colors.Normalize): def __init__(self, vmin=None, vmax=None,", "import colors import numpy as np class SaveOutput: def __init__(self):", "def __init__(self, vmin=None, vmax=None, vcenter=None, clip=False): self.vcenter = vcenter colors.Normalize.__init__(self,", "self.outputs = [] def __call__(self, module, module_in, module_out): self.outputs.append(module_out) def", "kinds of edge cases to make a # simple example...", "colors import numpy as np class SaveOutput: def __init__(self): self.outputs", "vmin, vmax, clip) def __call__(self, value, clip=None): # I'm ignoring", "clip=False): self.vcenter = vcenter colors.Normalize.__init__(self, vmin, vmax, clip) def __call__(self,", "self.outputs = [] class MidpointNormalize(colors.Normalize): def __init__(self, vmin=None, vmax=None, vcenter=None,", "__init__(self, vmin=None, vmax=None, vcenter=None, clip=False): self.vcenter = vcenter colors.Normalize.__init__(self, vmin,", "and all kinds of edge cases to make a #", "= vcenter colors.Normalize.__init__(self, vmin, vmax, clip) def __call__(self, value, clip=None):", "np class SaveOutput: def __init__(self): self.outputs = [] def __call__(self,", "module_out): self.outputs.append(module_out) def clear(self): self.outputs = [] class MidpointNormalize(colors.Normalize): 
def", "self.outputs.append(module_out) def clear(self): self.outputs = [] class MidpointNormalize(colors.Normalize): def __init__(self,", "from matplotlib import colors import numpy as np class SaveOutput:", "= [] def __call__(self, module, module_in, module_out): self.outputs.append(module_out) def clear(self):", "[] class MidpointNormalize(colors.Normalize): def __init__(self, vmin=None, vmax=None, vcenter=None, clip=False): self.vcenter", "__call__(self, value, clip=None): # I'm ignoring masked values and all", "edge cases to make a # simple example... x, y", "a # simple example... x, y = [self.vmin, self.vcenter, self.vmax],", "[self.vmin, self.vcenter, self.vmax], [self.vmin, self.vcenter, self.vmax] return np.ma.masked_array(np.interp(value, x, y))", "y = [self.vmin, self.vcenter, self.vmax], [self.vmin, self.vcenter, self.vmax] return np.ma.masked_array(np.interp(value,", "as np class SaveOutput: def __init__(self): self.outputs = [] def", "def __call__(self, module, module_in, module_out): self.outputs.append(module_out) def clear(self): self.outputs =", "def clear(self): self.outputs = [] class MidpointNormalize(colors.Normalize): def __init__(self, vmin=None,", "class MidpointNormalize(colors.Normalize): def __init__(self, vmin=None, vmax=None, vcenter=None, clip=False): self.vcenter =", "all kinds of edge cases to make a # simple", "of edge cases to make a # simple example... x,", "vcenter=None, clip=False): self.vcenter = vcenter colors.Normalize.__init__(self, vmin, vmax, clip) def", "example... 
x, y = [self.vmin, self.vcenter, self.vmax], [self.vmin, self.vcenter, self.vmax]", "def __call__(self, value, clip=None): # I'm ignoring masked values and", "= [self.vmin, self.vcenter, self.vmax], [self.vmin, self.vcenter, self.vmax] return np.ma.masked_array(np.interp(value, x,", "clip=None): # I'm ignoring masked values and all kinds of", "numpy as np class SaveOutput: def __init__(self): self.outputs = []", "vmax=None, vcenter=None, clip=False): self.vcenter = vcenter colors.Normalize.__init__(self, vmin, vmax, clip)", "[] def __call__(self, module, module_in, module_out): self.outputs.append(module_out) def clear(self): self.outputs", "vcenter colors.Normalize.__init__(self, vmin, vmax, clip) def __call__(self, value, clip=None): #", "simple example... x, y = [self.vmin, self.vcenter, self.vmax], [self.vmin, self.vcenter,", "module_in, module_out): self.outputs.append(module_out) def clear(self): self.outputs = [] class MidpointNormalize(colors.Normalize):", "SaveOutput: def __init__(self): self.outputs = [] def __call__(self, module, module_in,", "__init__(self): self.outputs = [] def __call__(self, module, module_in, module_out): self.outputs.append(module_out)", "ignoring masked values and all kinds of edge cases to", "masked values and all kinds of edge cases to make", "make a # simple example... x, y = [self.vmin, self.vcenter,", "module, module_in, module_out): self.outputs.append(module_out) def clear(self): self.outputs = [] class", "= [] class MidpointNormalize(colors.Normalize): def __init__(self, vmin=None, vmax=None, vcenter=None, clip=False):", "import numpy as np class SaveOutput: def __init__(self): self.outputs =", "# simple example... 
x, y = [self.vmin, self.vcenter, self.vmax], [self.vmin,", "def __init__(self): self.outputs = [] def __call__(self, module, module_in, module_out):", "self.vcenter = vcenter colors.Normalize.__init__(self, vmin, vmax, clip) def __call__(self, value,", "colors.Normalize.__init__(self, vmin, vmax, clip) def __call__(self, value, clip=None): # I'm", "I'm ignoring masked values and all kinds of edge cases", "__call__(self, module, module_in, module_out): self.outputs.append(module_out) def clear(self): self.outputs = []", "cases to make a # simple example... x, y =", "vmax, clip) def __call__(self, value, clip=None): # I'm ignoring masked", "MidpointNormalize(colors.Normalize): def __init__(self, vmin=None, vmax=None, vcenter=None, clip=False): self.vcenter = vcenter" ]
[ "init_timers(self): ioloop = tornado.ioloop.IOLoop.instance() # The mongo status monitor. We", "None self.app.mongodb = None @tornado.gen.coroutine def monitor_mongo_status(self): if (self.mongotimerbusy): self.log.warning('monitor_mongo_status:", "a link to the owning application. self.app = app self.log", "server is shutting down, never mind') return self.mongotimerbusy = True", "except Exception as ex: self.log.error('Mongo client not alive: %s', ex)", "and is open self.mongotimerbusy = False # true while monitor_mongo_status", "self.mongo: try: self.mongo.disconnect() except Exception as ex: self.log.error('Problem disconnecting mongo:", "# Keep a link to the owning application. self.app =", "ex) self.mongo = None self.app.mongodb = None @tornado.gen.coroutine def monitor_mongo_status(self):", "never mind') return self.mongotimerbusy = True if (self.mongoavailable): try: res", "connection. We'll open it in the # first monitor_mongo_status call.", "application. self.app = app self.log = self.app.log # This will", "previous call jam?') return if (self.app.shuttingdown): self.log.warning('monitor_mongo_status: server is shutting", "(not self.mongoavailable): try: self.mongo = motor.MotorClient(tz_aware=True) res = yield motor.Op(self.mongo.open)", "if (not self.mongoavailable): self.close() if (not self.mongoavailable): try: self.mongo =", "as ex: self.log.error('Problem disconnecting mongo: %s', ex) self.mongo = None", "ioloop.add_callback(self.monitor_mongo_status) res = tornado.ioloop.PeriodicCallback(self.monitor_mongo_status, 3000) res.start() def close(self): \"\"\"Close the", "runs # We also manage self.app.mongodb, a MotorDatabase. This must", "self.log.warning('monitor_mongo_status: already in flight; did a previous call jam?') return", "= None self.mongoavailable = False # true if self.mongo exists", "ioloop = tornado.ioloop.IOLoop.instance() # The mongo status monitor. We set", "non-None exactly when mongoavailable is true. 
def init_timers(self): ioloop =", "motor.Op(self.mongo.open) ### maybe authenticate to a database? self.mongoavailable = True", "None self.log.error('Mongo client not open: %s', ex) self.mongotimerbusy = False", "\"\"\" Manage the connection to the MongoDB server. \"\"\" import", "False self.app.mongodb = None self.log.error('Mongo client not open: %s', ex)", "again, or try to.) \"\"\" if self.mongo: try: self.mongo.disconnect() except", "We also manage self.app.mongodb, a MotorDatabase. This must be #", "@tornado.gen.coroutine def monitor_mongo_status(self): if (self.mongotimerbusy): self.log.warning('monitor_mongo_status: already in flight; did", "= self.app.log # This will be the Motor (MongoDB) connection.", "# This will be the Motor (MongoDB) connection. We'll open", "self.mongotimerbusy = False # true while monitor_mongo_status runs # We", "when mongoavailable is true. def init_timers(self): ioloop = tornado.ioloop.IOLoop.instance() #", "res = tornado.ioloop.PeriodicCallback(self.monitor_mongo_status, 3000) res.start() def close(self): \"\"\"Close the connection", "close(self): \"\"\"Close the connection to mongodb. (The monitor will start", "self.mongoavailable = False except Exception as ex: self.log.error('Mongo client not", "or try to.) \"\"\" if self.mongo: try: self.mongo.disconnect() except Exception", "def init_timers(self): ioloop = tornado.ioloop.IOLoop.instance() # The mongo status monitor.", "be the Motor (MongoDB) connection. We'll open it in the", "False except Exception as ex: self.log.error('Mongo client not alive: %s',", "= False # true while monitor_mongo_status runs # We also", "seconds. 
ioloop.add_callback(self.monitor_mongo_status) res = tornado.ioloop.PeriodicCallback(self.monitor_mongo_status, 3000) res.start() def close(self): \"\"\"Close", "\"\"\" if self.mongo: try: self.mongo.disconnect() except Exception as ex: self.log.error('Problem", "return if (self.app.shuttingdown): self.log.warning('monitor_mongo_status: server is shutting down, never mind')", "monitor. We set up one call immediately, and then #", "client not alive') self.mongoavailable = False except Exception as ex:", "will be the Motor (MongoDB) connection. We'll open it in", "if (self.mongotimerbusy): self.log.warning('monitor_mongo_status: already in flight; did a previous call", "down, never mind') return self.mongotimerbusy = True if (self.mongoavailable): try:", "True self.app.mongodb = self.mongo[self.app.opts.mongo_database] self.log.info('Mongo client open') self.app.queue_command({'cmd':'dbconnected'}) except Exception", "connection to the MongoDB server. \"\"\" import tornado.gen import tornado.ioloop", "as ex: self.log.error('Mongo client not alive: %s', ex) self.mongoavailable =", "authenticate to a database? self.mongoavailable = True self.app.mongodb = self.mongo[self.app.opts.mongo_database]", "to the owning application. self.app = app self.log = self.app.log", "True if (self.mongoavailable): try: res = yield motor.Op(self.mongo.admin.command, 'ping') if", "Manage the connection to the MongoDB server. \"\"\" import tornado.gen", "res.start() def close(self): \"\"\"Close the connection to mongodb. (The monitor", "tornado.gen import tornado.ioloop import motor class MongoMgr(object): def __init__(self, app):", "manage self.app.mongodb, a MotorDatabase. This must be # non-None exactly", "to a database? 
self.mongoavailable = True self.app.mongodb = self.mongo[self.app.opts.mongo_database] self.log.info('Mongo", "= self.mongo[self.app.opts.mongo_database] self.log.info('Mongo client open') self.app.queue_command({'cmd':'dbconnected'}) except Exception as ex:", "in the # first monitor_mongo_status call. self.mongo = None self.mongoavailable", "if (self.mongoavailable): try: res = yield motor.Op(self.mongo.admin.command, 'ping') if (not", "it right back up again, or try to.) \"\"\" if", "true. def init_timers(self): ioloop = tornado.ioloop.IOLoop.instance() # The mongo status", "not alive') self.mongoavailable = False except Exception as ex: self.log.error('Mongo", "### maybe authenticate to a database? self.mongoavailable = True self.app.mongodb", "try: self.mongo.disconnect() except Exception as ex: self.log.error('Problem disconnecting mongo: %s',", "self.close() if (not self.mongoavailable): try: self.mongo = motor.MotorClient(tz_aware=True) res =", "self.mongo = None self.app.mongodb = None @tornado.gen.coroutine def monitor_mongo_status(self): if", "self.mongo.disconnect() except Exception as ex: self.log.error('Problem disconnecting mongo: %s', ex)", "Motor (MongoDB) connection. We'll open it in the # first", "true while monitor_mongo_status runs # We also manage self.app.mongodb, a", "set up one call immediately, and then # try again", "3000) res.start() def close(self): \"\"\"Close the connection to mongodb. (The", "it in the # first monitor_mongo_status call. self.mongo = None", "This will be the Motor (MongoDB) connection. We'll open it", "# non-None exactly when mongoavailable is true. 
def init_timers(self): ioloop", "yield motor.Op(self.mongo.admin.command, 'ping') if (not res): self.log.error('Mongo client not alive')", "# true if self.mongo exists and is open self.mongotimerbusy =", "Exception as ex: self.mongoavailable = False self.app.mongodb = None self.log.error('Mongo", "call immediately, and then # try again every three seconds.", "(self.mongotimerbusy): self.log.warning('monitor_mongo_status: already in flight; did a previous call jam?')", "ex: self.log.error('Mongo client not alive: %s', ex) self.mongoavailable = False", "immediately, and then # try again every three seconds. ioloop.add_callback(self.monitor_mongo_status)", "mongo: %s', ex) self.mongo = None self.app.mongodb = None @tornado.gen.coroutine", "self.mongoavailable): try: self.mongo = motor.MotorClient(tz_aware=True) res = yield motor.Op(self.mongo.open) ###", "not alive: %s', ex) self.mongoavailable = False if (not self.mongoavailable):", "self.mongoavailable = False self.app.mongodb = None self.log.error('Mongo client not open:", "# first monitor_mongo_status call. self.mongo = None self.mongoavailable = False", "MongoMgr(object): def __init__(self, app): # Keep a link to the", "(MongoDB) connection. We'll open it in the # first monitor_mongo_status", "call. self.mongo = None self.mongoavailable = False # true if", "a MotorDatabase. This must be # non-None exactly when mongoavailable", "# try again every three seconds. ioloop.add_callback(self.monitor_mongo_status) res = tornado.ioloop.PeriodicCallback(self.monitor_mongo_status,", "motor.Op(self.mongo.admin.command, 'ping') if (not res): self.log.error('Mongo client not alive') self.mongoavailable", "self.app.queue_command({'cmd':'dbconnected'}) except Exception as ex: self.mongoavailable = False self.app.mongodb =", "def __init__(self, app): # Keep a link to the owning", "= None self.log.error('Mongo client not open: %s', ex) self.mongotimerbusy =", "owning application. 
self.app = app self.log = self.app.log # This", "res = yield motor.Op(self.mongo.open) ### maybe authenticate to a database?", "disconnecting mongo: %s', ex) self.mongo = None self.app.mongodb = None", "try: self.mongo = motor.MotorClient(tz_aware=True) res = yield motor.Op(self.mongo.open) ### maybe", "alive') self.mongoavailable = False except Exception as ex: self.log.error('Mongo client", "jam?') return if (self.app.shuttingdown): self.log.warning('monitor_mongo_status: server is shutting down, never", "a previous call jam?') return if (self.app.shuttingdown): self.log.warning('monitor_mongo_status: server is", "import tornado.gen import tornado.ioloop import motor class MongoMgr(object): def __init__(self,", "to mongodb. (The monitor will start it right back up", "open') self.app.queue_command({'cmd':'dbconnected'}) except Exception as ex: self.mongoavailable = False self.app.mongodb", "try again every three seconds. ioloop.add_callback(self.monitor_mongo_status) res = tornado.ioloop.PeriodicCallback(self.monitor_mongo_status, 3000)", "the connection to mongodb. (The monitor will start it right", "We set up one call immediately, and then # try", "monitor_mongo_status runs # We also manage self.app.mongodb, a MotorDatabase. This", "every three seconds. ioloop.add_callback(self.monitor_mongo_status) res = tornado.ioloop.PeriodicCallback(self.monitor_mongo_status, 3000) res.start() def", "try: res = yield motor.Op(self.mongo.admin.command, 'ping') if (not res): self.log.error('Mongo", "monitor will start it right back up again, or try", "the # first monitor_mongo_status call. self.mongo = None self.mongoavailable =", "try to.) \"\"\" if self.mongo: try: self.mongo.disconnect() except Exception as", "The mongo status monitor. We set up one call immediately,", "maybe authenticate to a database? self.mongoavailable = True self.app.mongodb =", "self.mongo exists and is open self.mongotimerbusy = False # true", "the MongoDB server. 
\"\"\" import tornado.gen import tornado.ioloop import motor", "= False if (not self.mongoavailable): self.close() if (not self.mongoavailable): try:", "if (not self.mongoavailable): try: self.mongo = motor.MotorClient(tz_aware=True) res = yield", "self.mongo = None self.mongoavailable = False # true if self.mongo", "up again, or try to.) \"\"\" if self.mongo: try: self.mongo.disconnect()", "yield motor.Op(self.mongo.open) ### maybe authenticate to a database? self.mongoavailable =", "\"\"\"Close the connection to mongodb. (The monitor will start it", "self.mongotimerbusy = True if (self.mongoavailable): try: res = yield motor.Op(self.mongo.admin.command,", "status monitor. We set up one call immediately, and then", "# We also manage self.app.mongodb, a MotorDatabase. This must be", "import tornado.ioloop import motor class MongoMgr(object): def __init__(self, app): #", "again every three seconds. ioloop.add_callback(self.monitor_mongo_status) res = tornado.ioloop.PeriodicCallback(self.monitor_mongo_status, 3000) res.start()", "tornado.ioloop.PeriodicCallback(self.monitor_mongo_status, 3000) res.start() def close(self): \"\"\"Close the connection to mongodb.", "%s', ex) self.mongo = None self.app.mongodb = None @tornado.gen.coroutine def", "class MongoMgr(object): def __init__(self, app): # Keep a link to", "self.log = self.app.log # This will be the Motor (MongoDB)", "be # non-None exactly when mongoavailable is true. def init_timers(self):", "right back up again, or try to.) \"\"\" if self.mongo:", "Exception as ex: self.log.error('Problem disconnecting mongo: %s', ex) self.mongo =", "# The mongo status monitor. We set up one call", "exists and is open self.mongotimerbusy = False # true while", "= False # true if self.mongo exists and is open", "ex) self.mongoavailable = False if (not self.mongoavailable): self.close() if (not", "first monitor_mongo_status call. 
self.mongo = None self.mongoavailable = False #", "true if self.mongo exists and is open self.mongotimerbusy = False", "self.app = app self.log = self.app.log # This will be", "False # true while monitor_mongo_status runs # We also manage", "This must be # non-None exactly when mongoavailable is true.", "= True self.app.mongodb = self.mongo[self.app.opts.mongo_database] self.log.info('Mongo client open') self.app.queue_command({'cmd':'dbconnected'}) except", "is open self.mongotimerbusy = False # true while monitor_mongo_status runs", "three seconds. ioloop.add_callback(self.monitor_mongo_status) res = tornado.ioloop.PeriodicCallback(self.monitor_mongo_status, 3000) res.start() def close(self):", "res = yield motor.Op(self.mongo.admin.command, 'ping') if (not res): self.log.error('Mongo client", "(not self.mongoavailable): self.close() if (not self.mongoavailable): try: self.mongo = motor.MotorClient(tz_aware=True)", "a database? self.mongoavailable = True self.app.mongodb = self.mongo[self.app.opts.mongo_database] self.log.info('Mongo client", "tornado.ioloop.IOLoop.instance() # The mongo status monitor. We set up one", "ex: self.mongoavailable = False self.app.mongodb = None self.log.error('Mongo client not", "We'll open it in the # first monitor_mongo_status call. self.mongo", "mind') return self.mongotimerbusy = True if (self.mongoavailable): try: res =", "to the MongoDB server. \"\"\" import tornado.gen import tornado.ioloop import", "= True if (self.mongoavailable): try: res = yield motor.Op(self.mongo.admin.command, 'ping')", "# true while monitor_mongo_status runs # We also manage self.app.mongodb,", "alive: %s', ex) self.mongoavailable = False if (not self.mongoavailable): self.close()", "did a previous call jam?') return if (self.app.shuttingdown): self.log.warning('monitor_mongo_status: server", "then # try again every three seconds. 
ioloop.add_callback(self.monitor_mongo_status) res =", "= app self.log = self.app.log # This will be the", "None @tornado.gen.coroutine def monitor_mongo_status(self): if (self.mongotimerbusy): self.log.warning('monitor_mongo_status: already in flight;", "monitor_mongo_status(self): if (self.mongotimerbusy): self.log.warning('monitor_mongo_status: already in flight; did a previous", "self.mongoavailable = True self.app.mongodb = self.mongo[self.app.opts.mongo_database] self.log.info('Mongo client open') self.app.queue_command({'cmd':'dbconnected'})", "False # true if self.mongo exists and is open self.mongotimerbusy", "self.app.log # This will be the Motor (MongoDB) connection. We'll", "while monitor_mongo_status runs # We also manage self.app.mongodb, a MotorDatabase.", "ex: self.log.error('Problem disconnecting mongo: %s', ex) self.mongo = None self.app.mongodb", "= tornado.ioloop.IOLoop.instance() # The mongo status monitor. We set up", "shutting down, never mind') return self.mongotimerbusy = True if (self.mongoavailable):", "self.app.mongodb = self.mongo[self.app.opts.mongo_database] self.log.info('Mongo client open') self.app.queue_command({'cmd':'dbconnected'}) except Exception as", "None self.mongoavailable = False # true if self.mongo exists and", "self.app.mongodb, a MotorDatabase. This must be # non-None exactly when", "mongoavailable is true. def init_timers(self): ioloop = tornado.ioloop.IOLoop.instance() # The", "mongo status monitor. We set up one call immediately, and", "MongoDB server. \"\"\" import tornado.gen import tornado.ioloop import motor class", "= None self.app.mongodb = None @tornado.gen.coroutine def monitor_mongo_status(self): if (self.mongotimerbusy):", "database? self.mongoavailable = True self.app.mongodb = self.mongo[self.app.opts.mongo_database] self.log.info('Mongo client open')", "open self.mongotimerbusy = False # true while monitor_mongo_status runs #", "server. 
\"\"\" import tornado.gen import tornado.ioloop import motor class MongoMgr(object):", "in flight; did a previous call jam?') return if (self.app.shuttingdown):", "(self.app.shuttingdown): self.log.warning('monitor_mongo_status: server is shutting down, never mind') return self.mongotimerbusy", "as ex: self.mongoavailable = False self.app.mongodb = None self.log.error('Mongo client", "self.log.warning('monitor_mongo_status: server is shutting down, never mind') return self.mongotimerbusy =", "mongodb. (The monitor will start it right back up again,", "self.app.mongodb = None self.log.error('Mongo client not open: %s', ex) self.mongotimerbusy", "link to the owning application. self.app = app self.log =", "self.log.error('Mongo client not alive: %s', ex) self.mongoavailable = False if", "%s', ex) self.mongoavailable = False if (not self.mongoavailable): self.close() if", "MotorDatabase. This must be # non-None exactly when mongoavailable is", "will start it right back up again, or try to.)", "motor.MotorClient(tz_aware=True) res = yield motor.Op(self.mongo.open) ### maybe authenticate to a", "= yield motor.Op(self.mongo.open) ### maybe authenticate to a database? self.mongoavailable", "is shutting down, never mind') return self.mongotimerbusy = True if", "self.mongoavailable = False if (not self.mongoavailable): self.close() if (not self.mongoavailable):", "def monitor_mongo_status(self): if (self.mongotimerbusy): self.log.warning('monitor_mongo_status: already in flight; did a", "self.mongoavailable): self.close() if (not self.mongoavailable): try: self.mongo = motor.MotorClient(tz_aware=True) res", "False if (not self.mongoavailable): self.close() if (not self.mongoavailable): try: self.mongo", "Keep a link to the owning application. self.app = app", "and then # try again every three seconds. 
ioloop.add_callback(self.monitor_mongo_status) res", "self.log.error('Mongo client not alive') self.mongoavailable = False except Exception as", "__init__(self, app): # Keep a link to the owning application.", "self.mongoavailable = False # true if self.mongo exists and is", "app self.log = self.app.log # This will be the Motor", "self.mongo = motor.MotorClient(tz_aware=True) res = yield motor.Op(self.mongo.open) ### maybe authenticate", "import motor class MongoMgr(object): def __init__(self, app): # Keep a", "if self.mongo: try: self.mongo.disconnect() except Exception as ex: self.log.error('Problem disconnecting", "= yield motor.Op(self.mongo.admin.command, 'ping') if (not res): self.log.error('Mongo client not", "client open') self.app.queue_command({'cmd':'dbconnected'}) except Exception as ex: self.mongoavailable = False", "= False self.app.mongodb = None self.log.error('Mongo client not open: %s',", "monitor_mongo_status call. self.mongo = None self.mongoavailable = False # true", "if self.mongo exists and is open self.mongotimerbusy = False #", "open it in the # first monitor_mongo_status call. self.mongo =", "flight; did a previous call jam?') return if (self.app.shuttingdown): self.log.warning('monitor_mongo_status:", "def close(self): \"\"\"Close the connection to mongodb. (The monitor will", "\"\"\" import tornado.gen import tornado.ioloop import motor class MongoMgr(object): def", "is true. def init_timers(self): ioloop = tornado.ioloop.IOLoop.instance() # The mongo", "= tornado.ioloop.PeriodicCallback(self.monitor_mongo_status, 3000) res.start() def close(self): \"\"\"Close the connection to", "<reponame>erkyrath/tworld \"\"\" Manage the connection to the MongoDB server. 
\"\"\"", "= False except Exception as ex: self.log.error('Mongo client not alive:", "motor class MongoMgr(object): def __init__(self, app): # Keep a link", "self.mongo[self.app.opts.mongo_database] self.log.info('Mongo client open') self.app.queue_command({'cmd':'dbconnected'}) except Exception as ex: self.mongoavailable", "(not res): self.log.error('Mongo client not alive') self.mongoavailable = False except", "back up again, or try to.) \"\"\" if self.mongo: try:", "also manage self.app.mongodb, a MotorDatabase. This must be # non-None", "to.) \"\"\" if self.mongo: try: self.mongo.disconnect() except Exception as ex:", "start it right back up again, or try to.) \"\"\"", "except Exception as ex: self.log.error('Problem disconnecting mongo: %s', ex) self.mongo", "already in flight; did a previous call jam?') return if", "self.app.mongodb = None @tornado.gen.coroutine def monitor_mongo_status(self): if (self.mongotimerbusy): self.log.warning('monitor_mongo_status: already", "call jam?') return if (self.app.shuttingdown): self.log.warning('monitor_mongo_status: server is shutting down,", "tornado.ioloop import motor class MongoMgr(object): def __init__(self, app): # Keep", "the connection to the MongoDB server. 
\"\"\" import tornado.gen import", "(self.mongoavailable): try: res = yield motor.Op(self.mongo.admin.command, 'ping') if (not res):", "up one call immediately, and then # try again every", "one call immediately, and then # try again every three", "= None @tornado.gen.coroutine def monitor_mongo_status(self): if (self.mongotimerbusy): self.log.warning('monitor_mongo_status: already in", "if (self.app.shuttingdown): self.log.warning('monitor_mongo_status: server is shutting down, never mind') return", "res): self.log.error('Mongo client not alive') self.mongoavailable = False except Exception", "client not alive: %s', ex) self.mongoavailable = False if (not", "self.log.info('Mongo client open') self.app.queue_command({'cmd':'dbconnected'}) except Exception as ex: self.mongoavailable =", "Exception as ex: self.log.error('Mongo client not alive: %s', ex) self.mongoavailable", "connection to mongodb. (The monitor will start it right back", "the Motor (MongoDB) connection. We'll open it in the #", "(The monitor will start it right back up again, or", "'ping') if (not res): self.log.error('Mongo client not alive') self.mongoavailable =", "if (not res): self.log.error('Mongo client not alive') self.mongoavailable = False", "= motor.MotorClient(tz_aware=True) res = yield motor.Op(self.mongo.open) ### maybe authenticate to", "return self.mongotimerbusy = True if (self.mongoavailable): try: res = yield", "app): # Keep a link to the owning application. self.app", "must be # non-None exactly when mongoavailable is true. def", "except Exception as ex: self.mongoavailable = False self.app.mongodb = None", "the owning application. self.app = app self.log = self.app.log #", "self.log.error('Problem disconnecting mongo: %s', ex) self.mongo = None self.app.mongodb =", "exactly when mongoavailable is true. def init_timers(self): ioloop = tornado.ioloop.IOLoop.instance()" ]
[ "A_data = concatenate([A_actuals,zeros(10000)]) B_data = concatenate([B_actuals,zeros(10000)]) # Modeling conversions with", "sampleSuccessRateForBinomial(B_sessions,B_purchases) # Modeling the spend with a log-normal A_non_zero_data =", "> 0] B_non_zero_data = B_data[B_data > 0] A_spend = sampleMeanForLogNormal(A_non_zero_data)", "size=100) # Plus some zeros A_data = concatenate([A_actuals,zeros(10000)]) B_data =", "Log-Normal data A_actuals = lognormal(mean=4.10, sigma=1.0, size=100) B_actuals = lognormal(mean=4.00,", "2014 ####### ################################################# from bayesianABTest import sampleSuccessRateForBinomial, sampleMeanForLogNormal, probabilityOfABetterThanB from", "sampleMeanForLogNormal(B_non_zero_data) # Combining the two A_rps = A_CR*A_spend B_rps =", "= lognormal(mean=4.00, sigma=1.0, size=100) # Plus some zeros A_data =", "0) A_sessions = len(A_data) B_purchases = sum(B_data > 0) B_sessions", "two A_rps = A_CR*A_spend B_rps = B_CR*B_spend # Result: print", "A_CR = sampleSuccessRateForBinomial(A_sessions,A_purchases) B_CR = sampleSuccessRateForBinomial(B_sessions,B_purchases) # Modeling the spend", "= concatenate([A_actuals,zeros(10000)]) B_data = concatenate([B_actuals,zeros(10000)]) # Modeling conversions with a", "data A_actuals = lognormal(mean=4.10, sigma=1.0, size=100) B_actuals = lognormal(mean=4.00, sigma=1.0,", "# Generate Log-Normal data A_actuals = lognormal(mean=4.10, sigma=1.0, size=100) B_actuals", "A_data[A_data > 0] B_non_zero_data = B_data[B_data > 0] A_spend =", "####### ####### Date: April 2014 ####### ################################################# from bayesianABTest import", "B_data = concatenate([B_actuals,zeros(10000)]) # Modeling conversions with a binomial variable", "= A_data[A_data > 0] B_non_zero_data = B_data[B_data > 0] A_spend", "####### Date: April 2014 ####### ################################################# from bayesianABTest import sampleSuccessRateForBinomial,", "concatenate, zeros # 
Generate Log-Normal data A_actuals = lognormal(mean=4.10, sigma=1.0,", "################################################# from bayesianABTest import sampleSuccessRateForBinomial, sampleMeanForLogNormal, probabilityOfABetterThanB from numpy.random import", "spend with a log-normal A_non_zero_data = A_data[A_data > 0] B_non_zero_data", "= lognormal(mean=4.10, sigma=1.0, size=100) B_actuals = lognormal(mean=4.00, sigma=1.0, size=100) #", "zeros # Generate Log-Normal data A_actuals = lognormal(mean=4.10, sigma=1.0, size=100)", "A_rps = A_CR*A_spend B_rps = B_CR*B_spend # Result: print probabilityOfABetterThanB(A_rps,B_rps)", "April 2014 ####### ################################################# from bayesianABTest import sampleSuccessRateForBinomial, sampleMeanForLogNormal, probabilityOfABetterThanB", "> 0) B_sessions = len(B_data) A_CR = sampleSuccessRateForBinomial(A_sessions,A_purchases) B_CR =", "####### ################################################# from bayesianABTest import sampleSuccessRateForBinomial, sampleMeanForLogNormal, probabilityOfABetterThanB from numpy.random", "= sampleSuccessRateForBinomial(B_sessions,B_purchases) # Modeling the spend with a log-normal A_non_zero_data", "mean, concatenate, zeros # Generate Log-Normal data A_actuals = lognormal(mean=4.10,", "A_non_zero_data = A_data[A_data > 0] B_non_zero_data = B_data[B_data > 0]", "# Modeling the spend with a log-normal A_non_zero_data = A_data[A_data", "binomial variable A_purchases = sum(A_data > 0) A_sessions = len(A_data)", "with a log-normal A_non_zero_data = A_data[A_data > 0] B_non_zero_data =", "A_sessions = len(A_data) B_purchases = sum(B_data > 0) B_sessions =", "= B_data[B_data > 0] A_spend = sampleMeanForLogNormal(A_non_zero_data) B_spend = sampleMeanForLogNormal(B_non_zero_data)", "0) B_sessions = len(B_data) A_CR = sampleSuccessRateForBinomial(A_sessions,A_purchases) B_CR = sampleSuccessRateForBinomial(B_sessions,B_purchases)", "B_sessions = len(B_data) A_CR = 
sampleSuccessRateForBinomial(A_sessions,A_purchases) B_CR = sampleSuccessRateForBinomial(B_sessions,B_purchases) #", "= sampleMeanForLogNormal(B_non_zero_data) # Combining the two A_rps = A_CR*A_spend B_rps", "with a binomial variable A_purchases = sum(A_data > 0) A_sessions", "= sampleMeanForLogNormal(A_non_zero_data) B_spend = sampleMeanForLogNormal(B_non_zero_data) # Combining the two A_rps", "# Modeling conversions with a binomial variable A_purchases = sum(A_data", "= sum(A_data > 0) A_sessions = len(A_data) B_purchases = sum(B_data", "0] B_non_zero_data = B_data[B_data > 0] A_spend = sampleMeanForLogNormal(A_non_zero_data) B_spend", "sum(B_data > 0) B_sessions = len(B_data) A_CR = sampleSuccessRateForBinomial(A_sessions,A_purchases) B_CR", "concatenate([A_actuals,zeros(10000)]) B_data = concatenate([B_actuals,zeros(10000)]) # Modeling conversions with a binomial", "zeros A_data = concatenate([A_actuals,zeros(10000)]) B_data = concatenate([B_actuals,zeros(10000)]) # Modeling conversions", "####### Contact: <EMAIL> ####### ####### Date: April 2014 ####### #################################################", "Author: <NAME> ####### ####### Contact: <EMAIL> ####### ####### Date: April", "> 0] A_spend = sampleMeanForLogNormal(A_non_zero_data) B_spend = sampleMeanForLogNormal(B_non_zero_data) # Combining", "import lognormal from numpy import mean, concatenate, zeros # Generate", "Contact: <EMAIL> ####### ####### Date: April 2014 ####### ################################################# from", "concatenate([B_actuals,zeros(10000)]) # Modeling conversions with a binomial variable A_purchases =", "import sampleSuccessRateForBinomial, sampleMeanForLogNormal, probabilityOfABetterThanB from numpy.random import lognormal from numpy", "sigma=1.0, size=100) # Plus some zeros A_data = concatenate([A_actuals,zeros(10000)]) B_data", "B_actuals = lognormal(mean=4.00, sigma=1.0, size=100) # Plus some zeros A_data", "Plus some zeros A_data = concatenate([A_actuals,zeros(10000)]) 
B_data = concatenate([B_actuals,zeros(10000)]) #", "<NAME> ####### ####### Contact: <EMAIL> ####### ####### Date: April 2014", "> 0) A_sessions = len(A_data) B_purchases = sum(B_data > 0)", "sampleSuccessRateForBinomial(A_sessions,A_purchases) B_CR = sampleSuccessRateForBinomial(B_sessions,B_purchases) # Modeling the spend with a", "= len(A_data) B_purchases = sum(B_data > 0) B_sessions = len(B_data)", "from bayesianABTest import sampleSuccessRateForBinomial, sampleMeanForLogNormal, probabilityOfABetterThanB from numpy.random import lognormal", "####### Author: <NAME> ####### ####### Contact: <EMAIL> ####### ####### Date:", "size=100) B_actuals = lognormal(mean=4.00, sigma=1.0, size=100) # Plus some zeros", "sum(A_data > 0) A_sessions = len(A_data) B_purchases = sum(B_data >", "sampleSuccessRateForBinomial, sampleMeanForLogNormal, probabilityOfABetterThanB from numpy.random import lognormal from numpy import", "A_actuals = lognormal(mean=4.10, sigma=1.0, size=100) B_actuals = lognormal(mean=4.00, sigma=1.0, size=100)", "################################################# ####### Author: <NAME> ####### ####### Contact: <EMAIL> ####### #######", "probabilityOfABetterThanB from numpy.random import lognormal from numpy import mean, concatenate,", "len(A_data) B_purchases = sum(B_data > 0) B_sessions = len(B_data) A_CR", "from numpy.random import lognormal from numpy import mean, concatenate, zeros", "conversions with a binomial variable A_purchases = sum(A_data > 0)", "Modeling conversions with a binomial variable A_purchases = sum(A_data >", "a binomial variable A_purchases = sum(A_data > 0) A_sessions =", "the spend with a log-normal A_non_zero_data = A_data[A_data > 0]", "some zeros A_data = concatenate([A_actuals,zeros(10000)]) B_data = concatenate([B_actuals,zeros(10000)]) # Modeling", "B_CR = sampleSuccessRateForBinomial(B_sessions,B_purchases) # Modeling the spend with a log-normal", "a log-normal A_non_zero_data = A_data[A_data > 0] B_non_zero_data = 
B_data[B_data", "len(B_data) A_CR = sampleSuccessRateForBinomial(A_sessions,A_purchases) B_CR = sampleSuccessRateForBinomial(B_sessions,B_purchases) # Modeling the", "= concatenate([B_actuals,zeros(10000)]) # Modeling conversions with a binomial variable A_purchases", "= sum(B_data > 0) B_sessions = len(B_data) A_CR = sampleSuccessRateForBinomial(A_sessions,A_purchases)", "variable A_purchases = sum(A_data > 0) A_sessions = len(A_data) B_purchases", "from numpy import mean, concatenate, zeros # Generate Log-Normal data", "Generate Log-Normal data A_actuals = lognormal(mean=4.10, sigma=1.0, size=100) B_actuals =", "sigma=1.0, size=100) B_actuals = lognormal(mean=4.00, sigma=1.0, size=100) # Plus some", "0] A_spend = sampleMeanForLogNormal(A_non_zero_data) B_spend = sampleMeanForLogNormal(B_non_zero_data) # Combining the", "A_purchases = sum(A_data > 0) A_sessions = len(A_data) B_purchases =", "import mean, concatenate, zeros # Generate Log-Normal data A_actuals =", "B_data[B_data > 0] A_spend = sampleMeanForLogNormal(A_non_zero_data) B_spend = sampleMeanForLogNormal(B_non_zero_data) #", "numpy import mean, concatenate, zeros # Generate Log-Normal data A_actuals", "= len(B_data) A_CR = sampleSuccessRateForBinomial(A_sessions,A_purchases) B_CR = sampleSuccessRateForBinomial(B_sessions,B_purchases) # Modeling", "# Plus some zeros A_data = concatenate([A_actuals,zeros(10000)]) B_data = concatenate([B_actuals,zeros(10000)])", "B_non_zero_data = B_data[B_data > 0] A_spend = sampleMeanForLogNormal(A_non_zero_data) B_spend =", "# Combining the two A_rps = A_CR*A_spend B_rps = B_CR*B_spend", "A_spend = sampleMeanForLogNormal(A_non_zero_data) B_spend = sampleMeanForLogNormal(B_non_zero_data) # Combining the two", "sampleMeanForLogNormal, probabilityOfABetterThanB from numpy.random import lognormal from numpy import mean,", "sampleMeanForLogNormal(A_non_zero_data) B_spend = sampleMeanForLogNormal(B_non_zero_data) # Combining the two A_rps =", "the two A_rps = A_CR*A_spend 
B_rps = B_CR*B_spend # Result:", "= sampleSuccessRateForBinomial(A_sessions,A_purchases) B_CR = sampleSuccessRateForBinomial(B_sessions,B_purchases) # Modeling the spend with", "lognormal(mean=4.00, sigma=1.0, size=100) # Plus some zeros A_data = concatenate([A_actuals,zeros(10000)])", "####### ####### Contact: <EMAIL> ####### ####### Date: April 2014 #######", "<EMAIL> ####### ####### Date: April 2014 ####### ################################################# from bayesianABTest", "Combining the two A_rps = A_CR*A_spend B_rps = B_CR*B_spend #", "Date: April 2014 ####### ################################################# from bayesianABTest import sampleSuccessRateForBinomial, sampleMeanForLogNormal,", "Modeling the spend with a log-normal A_non_zero_data = A_data[A_data >", "numpy.random import lognormal from numpy import mean, concatenate, zeros #", "lognormal(mean=4.10, sigma=1.0, size=100) B_actuals = lognormal(mean=4.00, sigma=1.0, size=100) # Plus", "bayesianABTest import sampleSuccessRateForBinomial, sampleMeanForLogNormal, probabilityOfABetterThanB from numpy.random import lognormal from", "B_spend = sampleMeanForLogNormal(B_non_zero_data) # Combining the two A_rps = A_CR*A_spend", "lognormal from numpy import mean, concatenate, zeros # Generate Log-Normal", "B_purchases = sum(B_data > 0) B_sessions = len(B_data) A_CR =", "log-normal A_non_zero_data = A_data[A_data > 0] B_non_zero_data = B_data[B_data >" ]
[ "model.to_dict() == {'data': [ {'id': '123', 'type': 'dictionary'}, {'id': 'abc',", "import UpdateFields from airslate.entities.fields import Field def test_empty_update_fields__to_dict(): model =", "Field def test_empty_update_fields__to_dict(): model = UpdateFields() assert model.to_dict() == {'data':", "with this source code. from airslate.models.documents import UpdateFields from airslate.entities.fields", "model = UpdateFields(data=[Field('123'), Field('abc')]) assert model.to_dict() == {'data': [ {'id':", "Field('abc')]) assert model.to_dict() == {'data': [ {'id': '123', 'type': 'dictionary'},", "this source code. from airslate.models.documents import UpdateFields from airslate.entities.fields import", "assert model.to_dict() == {'data': [ {'id': '123', 'type': 'dictionary'}, {'id':", "== {'data': [ {'id': '123', 'type': 'dictionary'}, {'id': 'abc', 'type':", "view # the LICENSE file that was distributed with this", "This file is part of the airslate. # # Copyright", "== {'data': []} def test_update_fields__to_dict(): model = UpdateFields(data=[Field('123'), Field('abc')]) assert", "[]} def test_update_fields__to_dict(): model = UpdateFields(data=[Field('123'), Field('abc')]) assert model.to_dict() ==", "LICENSE file that was distributed with this source code. from", "airslate.models.documents import UpdateFields from airslate.entities.fields import Field def test_empty_update_fields__to_dict(): model", "file is part of the airslate. # # Copyright (c)", "was distributed with this source code. from airslate.models.documents import UpdateFields", "test_empty_update_fields__to_dict(): model = UpdateFields() assert model.to_dict() == {'data': []} def", "2021 airSlate, Inc. # # For the full copyright and", "is part of the airslate. # # Copyright (c) 2021", "Copyright (c) 2021 airSlate, Inc. # # For the full", "full copyright and license information, please view # the LICENSE", "distributed with this source code. 
from airslate.models.documents import UpdateFields from", "and license information, please view # the LICENSE file that", "information, please view # the LICENSE file that was distributed", "from airslate.entities.fields import Field def test_empty_update_fields__to_dict(): model = UpdateFields() assert", "For the full copyright and license information, please view #", "def test_empty_update_fields__to_dict(): model = UpdateFields() assert model.to_dict() == {'data': []}", "file that was distributed with this source code. from airslate.models.documents", "UpdateFields() assert model.to_dict() == {'data': []} def test_update_fields__to_dict(): model =", "= UpdateFields() assert model.to_dict() == {'data': []} def test_update_fields__to_dict(): model", "# Copyright (c) 2021 airSlate, Inc. # # For the", "Inc. # # For the full copyright and license information,", "def test_update_fields__to_dict(): model = UpdateFields(data=[Field('123'), Field('abc')]) assert model.to_dict() == {'data':", "that was distributed with this source code. from airslate.models.documents import", "import Field def test_empty_update_fields__to_dict(): model = UpdateFields() assert model.to_dict() ==", "# # Copyright (c) 2021 airSlate, Inc. # # For", "# For the full copyright and license information, please view", "model.to_dict() == {'data': []} def test_update_fields__to_dict(): model = UpdateFields(data=[Field('123'), Field('abc')])", "copyright and license information, please view # the LICENSE file", "of the airslate. # # Copyright (c) 2021 airSlate, Inc.", "the LICENSE file that was distributed with this source code.", "# the LICENSE file that was distributed with this source", "model = UpdateFields() assert model.to_dict() == {'data': []} def test_update_fields__to_dict():", "[ {'id': '123', 'type': 'dictionary'}, {'id': 'abc', 'type': 'dictionary'} ]}", "license information, please view # the LICENSE file that was", "(c) 2021 airSlate, Inc. 
# # For the full copyright", "assert model.to_dict() == {'data': []} def test_update_fields__to_dict(): model = UpdateFields(data=[Field('123'),", "code. from airslate.models.documents import UpdateFields from airslate.entities.fields import Field def", "from airslate.models.documents import UpdateFields from airslate.entities.fields import Field def test_empty_update_fields__to_dict():", "# # For the full copyright and license information, please", "airSlate, Inc. # # For the full copyright and license", "the full copyright and license information, please view # the", "test_update_fields__to_dict(): model = UpdateFields(data=[Field('123'), Field('abc')]) assert model.to_dict() == {'data': [", "= UpdateFields(data=[Field('123'), Field('abc')]) assert model.to_dict() == {'data': [ {'id': '123',", "{'data': [ {'id': '123', 'type': 'dictionary'}, {'id': 'abc', 'type': 'dictionary'}", "# This file is part of the airslate. # #", "{'data': []} def test_update_fields__to_dict(): model = UpdateFields(data=[Field('123'), Field('abc')]) assert model.to_dict()", "airslate. # # Copyright (c) 2021 airSlate, Inc. # #", "<filename>tests/models/test_documents.py # This file is part of the airslate. #", "airslate.entities.fields import Field def test_empty_update_fields__to_dict(): model = UpdateFields() assert model.to_dict()", "UpdateFields(data=[Field('123'), Field('abc')]) assert model.to_dict() == {'data': [ {'id': '123', 'type':", "the airslate. # # Copyright (c) 2021 airSlate, Inc. #", "part of the airslate. # # Copyright (c) 2021 airSlate,", "please view # the LICENSE file that was distributed with", "source code. from airslate.models.documents import UpdateFields from airslate.entities.fields import Field", "UpdateFields from airslate.entities.fields import Field def test_empty_update_fields__to_dict(): model = UpdateFields()" ]
[ "def __init__(self, name, id_): self.name = name self.id = id_", "class DynamicObject(object): def __init__(self, name, id_): self.name = name self.id", "DynamicObject(object): def __init__(self, name, id_): self.name = name self.id =" ]
[ "fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware from app import", "import api from app.core.config import config app = FastAPI(title=\"Sheypoor\") #", "import config app = FastAPI(title=\"Sheypoor\") # Set all CORS enabled", "app import api from app.core.config import config app = FastAPI(title=\"Sheypoor\")", "enabled origins app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"], ) app.include_router(api.router,", "CORSMiddleware from app import api from app.core.config import config app", "FastAPI(title=\"Sheypoor\") # Set all CORS enabled origins app.add_middleware( CORSMiddleware, allow_origins=[\"*\"],", "app.core.config import config app = FastAPI(title=\"Sheypoor\") # Set all CORS", "config app = FastAPI(title=\"Sheypoor\") # Set all CORS enabled origins", "CORS enabled origins app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"], )", "= FastAPI(title=\"Sheypoor\") # Set all CORS enabled origins app.add_middleware( CORSMiddleware,", "import CORSMiddleware from app import api from app.core.config import config", "fastapi.middleware.cors import CORSMiddleware from app import api from app.core.config import", "from fastapi.middleware.cors import CORSMiddleware from app import api from app.core.config", "FastAPI from fastapi.middleware.cors import CORSMiddleware from app import api from", "from app.core.config import config app = FastAPI(title=\"Sheypoor\") # Set all", "api from app.core.config import config app = FastAPI(title=\"Sheypoor\") # Set", "origins app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"], ) app.include_router(api.router, prefix=config.API_URI)", "from app import api from app.core.config import config app =", "from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware from 
app", "Set all CORS enabled origins app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"],", "all CORS enabled origins app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True, allow_methods=[\"*\"], allow_headers=[\"*\"],", "import FastAPI from fastapi.middleware.cors import CORSMiddleware from app import api", "# Set all CORS enabled origins app.add_middleware( CORSMiddleware, allow_origins=[\"*\"], allow_credentials=True,", "<reponame>meysam81/sheypoor from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware from", "app = FastAPI(title=\"Sheypoor\") # Set all CORS enabled origins app.add_middleware(" ]
[ "== '-': # CDS location unknown continue chromosome = parts[0]", "CHROMOSOMES = ('1', '2', '3', '4', '5', '6', '7', '8',", "== '-': # CDS strand negative order = reverse-complement continue", "Skip CDS which are not multiple of three in length.", "with open(CCDS_FILE, encoding='utf-8', newline='\\n') as fp: for line in fp:", "status: # CDS is not yet public continue if parts[6]", "three in length. continue cds.append(CdsPos( ccds_id=ccds_id, molecule='chr' + chromosome, indexes=locations", "location unknown continue chromosome = parts[0] assert chromosome in CHROMOSOMES,", "(exclusive) in reference genome. Whole CDS can be constructed as", "as concatenation of the sub-sequences.\"\"\" molecule: str \"\"\"Molecule name, see", "file with CDS locations within GRCh38 genome as a list", "not in status: # CDS is not yet public continue", "Whole CDS can be constructed as concatenation of the sub-sequences.\"\"\"", "'7', '8', '9', '10', '11', '12', '13', '14', '15', '16',", "cds = [] with open(CCDS_FILE, encoding='utf-8', newline='\\n') as fp: for", "% 3 != 0: # Skip CDS which are not", "'2', '3', '4', '5', '6', '7', '8', '9', '10', '11',", "CHROMOSOMES, chromosome locations = [] assert locations_str.startswith('[') assert locations_str.endswith(']') for", "locations_str.endswith(']') for location_str in locations_str[1:-1].split(','): start_str, stop_str = location_str.split('-') start,", "continue locations_str = parts[9] if locations_str == '-': # CDS", "list \"\"\"2-tuples with start (inclusive) and stop indexes (exclusive) in", ":class:`CdsPos`.\"\"\" cds = [] with open(CCDS_FILE, encoding='utf-8', newline='\\n') as fp:", "= line.split('\\t') ccds_id = parts[4] status = parts[5] if 'Public'", "parts[6] == '-': # CDS strand negative order = reverse-complement", "multiple of three in length. 
continue cds.append(CdsPos( ccds_id=ccds_id, molecule='chr' +", "!= 0: # Skip CDS which are not multiple of", "genome as a list of :class:`CdsPos`.\"\"\" cds = [] with", "List[CdsPos]: \"\"\"Load file with CDS locations within GRCh38 genome as", "'3', '4', '5', '6', '7', '8', '9', '10', '11', '12',", "indexes: list \"\"\"2-tuples with start (inclusive) and stop indexes (exclusive)", "within GRCh38 genome as a list of :class:`CdsPos`.\"\"\" cds =", "of the sub-sequences.\"\"\" molecule: str \"\"\"Molecule name, see :const:`CHROMOSOMES`\"\"\" def", "# CDS is not yet public continue if parts[6] ==", "of three in length. continue cds.append(CdsPos( ccds_id=ccds_id, molecule='chr' + chromosome,", "as a list of :class:`CdsPos`.\"\"\" cds = [] with open(CCDS_FILE,", "'-': # CDS strand negative order = reverse-complement continue locations_str", "from typing import List, NamedTuple CCDS_FILE = 'CCDS.current.txt' CHROMOSOMES =", "def load_ccds() -> List[CdsPos]: \"\"\"Load file with CDS locations within", "lines continue if line.startswith('#'): # Skip comments continue parts =", "1 locations.append((start, stop)) if sum(b - a for a, b", "if locations_str == '-': # CDS location unknown continue chromosome", "for a, b in locations) % 3 != 0: #", "newline='\\n') as fp: for line in fp: if not line:", "Skip empty lines continue if line.startswith('#'): # Skip comments continue", "if parts[6] == '-': # CDS strand negative order =", "assert locations_str.startswith('[') assert locations_str.endswith(']') for location_str in locations_str[1:-1].split(','): start_str, stop_str", "List, NamedTuple CCDS_FILE = 'CCDS.current.txt' CHROMOSOMES = ('1', '2', '3',", "'21', '22', 'X', 'Y') class CdsPos(NamedTuple): ccds_id: str indexes: list", "locations_str[1:-1].split(','): start_str, stop_str = location_str.split('-') start, stop = int(start_str), int(stop_str)", "parts[5] if 'Public' not in status: # CDS is not", "concatenation of the sub-sequences.\"\"\" molecule: str \"\"\"Molecule 
name, see :const:`CHROMOSOMES`\"\"\"", "CDS can be constructed as concatenation of the sub-sequences.\"\"\" molecule:", "<filename>cdnu/ccds.py from typing import List, NamedTuple CCDS_FILE = 'CCDS.current.txt' CHROMOSOMES", "\"\"\"Molecule name, see :const:`CHROMOSOMES`\"\"\" def load_ccds() -> List[CdsPos]: \"\"\"Load file", "stop = int(start_str), int(stop_str) + 1 locations.append((start, stop)) if sum(b", "stop)) if sum(b - a for a, b in locations)", "status = parts[5] if 'Public' not in status: # CDS", "with CDS locations within GRCh38 genome as a list of", "line.startswith('#'): # Skip comments continue parts = line.split('\\t') ccds_id =", "- a for a, b in locations) % 3 !=", "negative order = reverse-complement continue locations_str = parts[9] if locations_str", "the sub-sequences.\"\"\" molecule: str \"\"\"Molecule name, see :const:`CHROMOSOMES`\"\"\" def load_ccds()", "CDS strand negative order = reverse-complement continue locations_str = parts[9]", "for line in fp: if not line: # Skip empty", "if 'Public' not in status: # CDS is not yet", "= parts[5] if 'Public' not in status: # CDS is", "parts[0] assert chromosome in CHROMOSOMES, chromosome locations = [] assert", "str indexes: list \"\"\"2-tuples with start (inclusive) and stop indexes", "a for a, b in locations) % 3 != 0:", "'17', '18', '19', '20', '21', '22', 'X', 'Y') class CdsPos(NamedTuple):", "'CCDS.current.txt' CHROMOSOMES = ('1', '2', '3', '4', '5', '6', '7',", "b in locations) % 3 != 0: # Skip CDS", "sub-sequences.\"\"\" molecule: str \"\"\"Molecule name, see :const:`CHROMOSOMES`\"\"\" def load_ccds() ->", "typing import List, NamedTuple CCDS_FILE = 'CCDS.current.txt' CHROMOSOMES = ('1',", "reverse-complement continue locations_str = parts[9] if locations_str == '-': #", "fp: for line in fp: if not line: # Skip", "in length. 
continue cds.append(CdsPos( ccds_id=ccds_id, molecule='chr' + chromosome, indexes=locations ))", "= [] assert locations_str.startswith('[') assert locations_str.endswith(']') for location_str in locations_str[1:-1].split(','):", "start_str, stop_str = location_str.split('-') start, stop = int(start_str), int(stop_str) +", "in CHROMOSOMES, chromosome locations = [] assert locations_str.startswith('[') assert locations_str.endswith(']')", "continue if parts[6] == '-': # CDS strand negative order", "not yet public continue if parts[6] == '-': # CDS", "in status: # CDS is not yet public continue if", "CDS locations within GRCh38 genome as a list of :class:`CdsPos`.\"\"\"", "continue if line.startswith('#'): # Skip comments continue parts = line.split('\\t')", "import List, NamedTuple CCDS_FILE = 'CCDS.current.txt' CHROMOSOMES = ('1', '2',", "# Skip comments continue parts = line.split('\\t') ccds_id = parts[4]", "parts[4] status = parts[5] if 'Public' not in status: #", "= [] with open(CCDS_FILE, encoding='utf-8', newline='\\n') as fp: for line", "+ 1 locations.append((start, stop)) if sum(b - a for a,", "strand negative order = reverse-complement continue locations_str = parts[9] if", "'4', '5', '6', '7', '8', '9', '10', '11', '12', '13',", "-> List[CdsPos]: \"\"\"Load file with CDS locations within GRCh38 genome", "parts[9] if locations_str == '-': # CDS location unknown continue", "indexes (exclusive) in reference genome. 
Whole CDS can be constructed", "chromosome locations = [] assert locations_str.startswith('[') assert locations_str.endswith(']') for location_str", "be constructed as concatenation of the sub-sequences.\"\"\" molecule: str \"\"\"Molecule", "in locations_str[1:-1].split(','): start_str, stop_str = location_str.split('-') start, stop = int(start_str),", "chromosome = parts[0] assert chromosome in CHROMOSOMES, chromosome locations =", ":const:`CHROMOSOMES`\"\"\" def load_ccds() -> List[CdsPos]: \"\"\"Load file with CDS locations", "chromosome in CHROMOSOMES, chromosome locations = [] assert locations_str.startswith('[') assert", "'16', '17', '18', '19', '20', '21', '22', 'X', 'Y') class", "('1', '2', '3', '4', '5', '6', '7', '8', '9', '10',", "empty lines continue if line.startswith('#'): # Skip comments continue parts", "stop indexes (exclusive) in reference genome. Whole CDS can be", "in fp: if not line: # Skip empty lines continue", "line in fp: if not line: # Skip empty lines", "parts = line.split('\\t') ccds_id = parts[4] status = parts[5] if", "stop_str = location_str.split('-') start, stop = int(start_str), int(stop_str) + 1", "'6', '7', '8', '9', '10', '11', '12', '13', '14', '15',", "int(start_str), int(stop_str) + 1 locations.append((start, stop)) if sum(b - a", "locations_str == '-': # CDS location unknown continue chromosome =", "'5', '6', '7', '8', '9', '10', '11', '12', '13', '14',", "encoding='utf-8', newline='\\n') as fp: for line in fp: if not", "a, b in locations) % 3 != 0: # Skip", "can be constructed as concatenation of the sub-sequences.\"\"\" molecule: str", "'19', '20', '21', '22', 'X', 'Y') class CdsPos(NamedTuple): ccds_id: str", "'10', '11', '12', '13', '14', '15', '16', '17', '18', '19',", "CDS is not yet public continue if parts[6] == '-':", "fp: if not line: # Skip empty lines continue if", "# CDS strand negative order = reverse-complement continue locations_str =", "line.split('\\t') ccds_id = parts[4] status = parts[5] if 'Public' not", 
"ccds_id = parts[4] status = parts[5] if 'Public' not in", "is not yet public continue if parts[6] == '-': #", "[] assert locations_str.startswith('[') assert locations_str.endswith(']') for location_str in locations_str[1:-1].split(','): start_str,", "which are not multiple of three in length. continue cds.append(CdsPos(", "'X', 'Y') class CdsPos(NamedTuple): ccds_id: str indexes: list \"\"\"2-tuples with", "for location_str in locations_str[1:-1].split(','): start_str, stop_str = location_str.split('-') start, stop", "# Skip CDS which are not multiple of three in", "sum(b - a for a, b in locations) % 3", "ccds_id: str indexes: list \"\"\"2-tuples with start (inclusive) and stop", "locations_str.startswith('[') assert locations_str.endswith(']') for location_str in locations_str[1:-1].split(','): start_str, stop_str =", "\"\"\"Load file with CDS locations within GRCh38 genome as a", "assert chromosome in CHROMOSOMES, chromosome locations = [] assert locations_str.startswith('[')", "NamedTuple CCDS_FILE = 'CCDS.current.txt' CHROMOSOMES = ('1', '2', '3', '4',", "'8', '9', '10', '11', '12', '13', '14', '15', '16', '17',", "'14', '15', '16', '17', '18', '19', '20', '21', '22', 'X',", "'15', '16', '17', '18', '19', '20', '21', '22', 'X', 'Y')", "# CDS location unknown continue chromosome = parts[0] assert chromosome", "a list of :class:`CdsPos`.\"\"\" cds = [] with open(CCDS_FILE, encoding='utf-8',", "continue parts = line.split('\\t') ccds_id = parts[4] status = parts[5]", "'12', '13', '14', '15', '16', '17', '18', '19', '20', '21',", "= int(start_str), int(stop_str) + 1 locations.append((start, stop)) if sum(b -", "[] with open(CCDS_FILE, encoding='utf-8', newline='\\n') as fp: for line in", "int(stop_str) + 1 locations.append((start, stop)) if sum(b - a for", "with start (inclusive) and stop indexes (exclusive) in reference genome.", "CDS location unknown continue chromosome = parts[0] assert chromosome in", "\"\"\"2-tuples with start (inclusive) and stop indexes 
(exclusive) in reference", "locations = [] assert locations_str.startswith('[') assert locations_str.endswith(']') for location_str in", "(inclusive) and stop indexes (exclusive) in reference genome. Whole CDS", "name, see :const:`CHROMOSOMES`\"\"\" def load_ccds() -> List[CdsPos]: \"\"\"Load file with", "GRCh38 genome as a list of :class:`CdsPos`.\"\"\" cds = []", "'Y') class CdsPos(NamedTuple): ccds_id: str indexes: list \"\"\"2-tuples with start", "= parts[0] assert chromosome in CHROMOSOMES, chromosome locations = []", "comments continue parts = line.split('\\t') ccds_id = parts[4] status =", "location_str in locations_str[1:-1].split(','): start_str, stop_str = location_str.split('-') start, stop =", "'20', '21', '22', 'X', 'Y') class CdsPos(NamedTuple): ccds_id: str indexes:", "= reverse-complement continue locations_str = parts[9] if locations_str == '-':", "molecule: str \"\"\"Molecule name, see :const:`CHROMOSOMES`\"\"\" def load_ccds() -> List[CdsPos]:", "'22', 'X', 'Y') class CdsPos(NamedTuple): ccds_id: str indexes: list \"\"\"2-tuples", "not line: # Skip empty lines continue if line.startswith('#'): #", "are not multiple of three in length. continue cds.append(CdsPos( ccds_id=ccds_id,", "yet public continue if parts[6] == '-': # CDS strand", "start, stop = int(start_str), int(stop_str) + 1 locations.append((start, stop)) if", "assert locations_str.endswith(']') for location_str in locations_str[1:-1].split(','): start_str, stop_str = location_str.split('-')", "class CdsPos(NamedTuple): ccds_id: str indexes: list \"\"\"2-tuples with start (inclusive)", "= location_str.split('-') start, stop = int(start_str), int(stop_str) + 1 locations.append((start,", "in reference genome. Whole CDS can be constructed as concatenation", "list of :class:`CdsPos`.\"\"\" cds = [] with open(CCDS_FILE, encoding='utf-8', newline='\\n')", "3 != 0: # Skip CDS which are not multiple", "and stop indexes (exclusive) in reference genome. 
Whole CDS can", "'18', '19', '20', '21', '22', 'X', 'Y') class CdsPos(NamedTuple): ccds_id:", "Skip comments continue parts = line.split('\\t') ccds_id = parts[4] status", "CCDS_FILE = 'CCDS.current.txt' CHROMOSOMES = ('1', '2', '3', '4', '5',", "CdsPos(NamedTuple): ccds_id: str indexes: list \"\"\"2-tuples with start (inclusive) and", "see :const:`CHROMOSOMES`\"\"\" def load_ccds() -> List[CdsPos]: \"\"\"Load file with CDS", "as fp: for line in fp: if not line: #", "not multiple of three in length. continue cds.append(CdsPos( ccds_id=ccds_id, molecule='chr'", "0: # Skip CDS which are not multiple of three", "open(CCDS_FILE, encoding='utf-8', newline='\\n') as fp: for line in fp: if", "= parts[9] if locations_str == '-': # CDS location unknown", "continue chromosome = parts[0] assert chromosome in CHROMOSOMES, chromosome locations", "str \"\"\"Molecule name, see :const:`CHROMOSOMES`\"\"\" def load_ccds() -> List[CdsPos]: \"\"\"Load", "line: # Skip empty lines continue if line.startswith('#'): # Skip", "locations_str = parts[9] if locations_str == '-': # CDS location", "of :class:`CdsPos`.\"\"\" cds = [] with open(CCDS_FILE, encoding='utf-8', newline='\\n') as", "genome. Whole CDS can be constructed as concatenation of the", "public continue if parts[6] == '-': # CDS strand negative", "reference genome. Whole CDS can be constructed as concatenation of", "constructed as concatenation of the sub-sequences.\"\"\" molecule: str \"\"\"Molecule name,", "= 'CCDS.current.txt' CHROMOSOMES = ('1', '2', '3', '4', '5', '6',", "CDS which are not multiple of three in length. continue", "order = reverse-complement continue locations_str = parts[9] if locations_str ==", "length. 
continue cds.append(CdsPos( ccds_id=ccds_id, molecule='chr' + chromosome, indexes=locations )) return", "location_str.split('-') start, stop = int(start_str), int(stop_str) + 1 locations.append((start, stop))", "continue cds.append(CdsPos( ccds_id=ccds_id, molecule='chr' + chromosome, indexes=locations )) return cds", "= parts[4] status = parts[5] if 'Public' not in status:", "'Public' not in status: # CDS is not yet public", "if line.startswith('#'): # Skip comments continue parts = line.split('\\t') ccds_id", "locations within GRCh38 genome as a list of :class:`CdsPos`.\"\"\" cds", "if sum(b - a for a, b in locations) %", "# Skip empty lines continue if line.startswith('#'): # Skip comments", "'13', '14', '15', '16', '17', '18', '19', '20', '21', '22',", "= ('1', '2', '3', '4', '5', '6', '7', '8', '9',", "in locations) % 3 != 0: # Skip CDS which", "locations) % 3 != 0: # Skip CDS which are", "'-': # CDS location unknown continue chromosome = parts[0] assert", "'9', '10', '11', '12', '13', '14', '15', '16', '17', '18',", "'11', '12', '13', '14', '15', '16', '17', '18', '19', '20',", "load_ccds() -> List[CdsPos]: \"\"\"Load file with CDS locations within GRCh38", "start (inclusive) and stop indexes (exclusive) in reference genome. Whole", "unknown continue chromosome = parts[0] assert chromosome in CHROMOSOMES, chromosome", "locations.append((start, stop)) if sum(b - a for a, b in", "if not line: # Skip empty lines continue if line.startswith('#'):" ]
[ "from gefest.core.structure.polygon import Polygon from gefest.core.structure.structure import Structure from gefest.core.algs.postproc.resolve_errors", "test_too_close(): same_poly = deepcopy(unclosed_triangle_poly) same_poly.id = 'same_triangle' input_structure = Structure([unclosed_triangle_poly,", "poly_length = 20 # creating a testing polygons via corner", "in incorrect_points]) domain = Domain() def test_unclosed_poly(): input_structure = Structure([unclosed_triangle_poly])", "= Polygon('triangle', points=[Point(*coords) for coords in triangle_points]) incorrect_points = [(5,", "(8, poly_length), (5, 5), (5, 30)] incorrect_poly = Polygon('incorrect_poly', points=[Point(*coords)", "domain) assert unclosed_poly(input_structure, domain) assert not unclosed_poly(observed_structure, domain) def test_self_intersection():", "def test_self_intersection(): input_structure = Structure([incorrect_poly]) observed_structure = postprocess(input_structure, domain) assert", "40)]) input_structure = Structure([poly_like_fixed, unclosed_triangle_poly]) observed_structure = postprocess(input_structure, domain) assert", "via corner points rectangle_points = [(-1, 40), (-1, poly_length+40), (-poly_width-10,", "deepcopy from gefest.core.structure.point import Point from gefest.core.structure.polygon import Polygon from", "domain) assert not unclosed_poly(observed_structure, domain) def test_self_intersection(): input_structure = Structure([incorrect_poly])", "import Point from gefest.core.structure.polygon import Polygon from gefest.core.structure.structure import Structure", "= Structure([out_bounds_rectangle_poly]) observed_structure = postprocess(input_structure, domain) assert out_of_bound(input_structure, domain) assert", "(5, 30)] incorrect_poly = Polygon('incorrect_poly', points=[Point(*coords) for coords in incorrect_points])", "Point(15, 40)]) input_structure = Structure([poly_like_fixed, unclosed_triangle_poly]) observed_structure = postprocess(input_structure, 
domain)", "gefest.core.structure.point import Point from gefest.core.structure.polygon import Polygon from gefest.core.structure.structure import", "poly_width = 10 poly_length = 20 # creating a testing", "out_bounds_rectangle_poly = Polygon('rectangle', points=[Point(*coords) for coords in rectangle_points]) triangle_points =", "[(1, 1), (poly_width, poly_length), (1, poly_length)] unclosed_triangle_poly = Polygon('triangle', points=[Point(*coords)", "from gefest.core.structure.point import Point from gefest.core.structure.polygon import Polygon from gefest.core.structure.structure", "self_intersection(observed_structure) def test_out_of_bound(): input_structure = Structure([out_bounds_rectangle_poly]) observed_structure = postprocess(input_structure, domain)", "Domain() def test_unclosed_poly(): input_structure = Structure([unclosed_triangle_poly]) observed_structure = postprocess(input_structure, domain)", "assert unclosed_poly(input_structure, domain) assert not unclosed_poly(observed_structure, domain) def test_self_intersection(): input_structure", "from gefest.core.structure.structure import Structure from gefest.core.algs.postproc.resolve_errors import * from gefest.core.algs.geom.validation", "import * # marking length and width for testing polygon", "domain) assert all([np.isclose(len(observed_structure.polygons), 2), 'like_fixed' not in [poly.id for poly", "points=[Point(*coords) for coords in rectangle_points]) triangle_points = [(1, 1), (poly_width,", "'same_triangle' input_structure = Structure([unclosed_triangle_poly, same_poly]) observed_structure = postprocess(input_structure, domain) print(observed_structure.polygons)", "rectangle_points = [(-1, 40), (-1, poly_length+40), (-poly_width-10, poly_length+40), (-poly_width-10, 40)]", "marking length and width for testing polygon poly_width = 10", "copy import deepcopy from gefest.core.structure.point import Point from gefest.core.structure.polygon import", "observed_structure.polygons]]) def 
test_too_close(): same_poly = deepcopy(unclosed_triangle_poly) same_poly.id = 'same_triangle' input_structure", "test_out_of_bound(): input_structure = Structure([out_bounds_rectangle_poly]) observed_structure = postprocess(input_structure, domain) assert out_of_bound(input_structure,", "poly_length), (1, poly_length)] unclosed_triangle_poly = Polygon('triangle', points=[Point(*coords) for coords in", "[poly.id for poly in observed_structure.polygons], 'fixed' in [poly.id for poly", "points=[Point(15, 30), Point(40, 30), Point(15, 40)]) input_structure = Structure([poly_like_fixed, unclosed_triangle_poly])", "polygon poly_width = 10 poly_length = 20 # creating a", "unclosed_poly(input_structure, domain) assert not unclosed_poly(observed_structure, domain) def test_self_intersection(): input_structure =", "incorrect_points]) domain = Domain() def test_unclosed_poly(): input_structure = Structure([unclosed_triangle_poly]) observed_structure", "domain) assert out_of_bound(input_structure, domain) assert not out_of_bound(observed_structure, domain) def test_fixed_polys():", "domain) assert self_intersection(input_structure) assert not self_intersection(observed_structure) def test_out_of_bound(): input_structure =", "= 20 # creating a testing polygons via corner points", "in [poly.id for poly in observed_structure.polygons]]) def test_too_close(): same_poly =", "for coords in triangle_points]) incorrect_points = [(5, 5), (5, poly_length),", "40]]]) poly_like_fixed = Polygon('like_fixed', points=[Point(15, 30), Point(40, 30), Point(15, 40)])", "in observed_structure.polygons], 'fixed' in [poly.id for poly in observed_structure.polygons]]) def", "pytest from copy import deepcopy from gefest.core.structure.point import Point from", "for testing polygon poly_width = 10 poly_length = 20 #", "poly_length), (5, 5), (5, 30)] incorrect_poly = Polygon('incorrect_poly', points=[Point(*coords) for", "assert not out_of_bound(observed_structure, domain) def test_fixed_polys(): 
domain = Domain(fixed_points=[[[15, 30],", "postprocess(input_structure, domain) assert all([np.isclose(len(observed_structure.polygons), 2), 'like_fixed' not in [poly.id for", "30), Point(40, 30), Point(15, 40)]) input_structure = Structure([poly_like_fixed, unclosed_triangle_poly]) observed_structure", "in rectangle_points]) triangle_points = [(1, 1), (poly_width, poly_length), (1, poly_length)]", "input_structure = Structure([poly_like_fixed, unclosed_triangle_poly]) observed_structure = postprocess(input_structure, domain) assert all([np.isclose(len(observed_structure.polygons),", "= Polygon('rectangle', points=[Point(*coords) for coords in rectangle_points]) triangle_points = [(1,", "Polygon('triangle', points=[Point(*coords) for coords in triangle_points]) incorrect_points = [(5, 5),", "input_structure = Structure([unclosed_triangle_poly, same_poly]) observed_structure = postprocess(input_structure, domain) print(observed_structure.polygons) assert", "= postprocess(input_structure, domain) assert unclosed_poly(input_structure, domain) assert not unclosed_poly(observed_structure, domain)", "10 poly_length = 20 # creating a testing polygons via", "all([np.isclose(len(observed_structure.polygons), 2), 'like_fixed' not in [poly.id for poly in observed_structure.polygons],", "from gefest.core.algs.geom.validation import * # marking length and width for", "Point from gefest.core.structure.polygon import Polygon from gefest.core.structure.structure import Structure from", "postprocess(input_structure, domain) assert out_of_bound(input_structure, domain) assert not out_of_bound(observed_structure, domain) def", "creating a testing polygons via corner points rectangle_points = [(-1,", "poly_length)] unclosed_triangle_poly = Polygon('triangle', points=[Point(*coords) for coords in triangle_points]) incorrect_points", "assert self_intersection(input_structure) assert not self_intersection(observed_structure) def test_out_of_bound(): input_structure = 
Structure([out_bounds_rectangle_poly])", "30], [40, 30], [15, 40]]]) poly_like_fixed = Polygon('like_fixed', points=[Point(15, 30),", "gefest.core.structure.structure import Structure from gefest.core.algs.postproc.resolve_errors import * from gefest.core.algs.geom.validation import", "= [(5, 5), (5, poly_length), (8, poly_length), (5, 5), (5,", "in observed_structure.polygons]]) def test_too_close(): same_poly = deepcopy(unclosed_triangle_poly) same_poly.id = 'same_triangle'", "import Polygon from gefest.core.structure.structure import Structure from gefest.core.algs.postproc.resolve_errors import *", "from gefest.core.algs.postproc.resolve_errors import * from gefest.core.algs.geom.validation import * # marking", "import * from gefest.core.algs.geom.validation import * # marking length and", "corner points rectangle_points = [(-1, 40), (-1, poly_length+40), (-poly_width-10, poly_length+40),", "not in [poly.id for poly in observed_structure.polygons], 'fixed' in [poly.id", "incorrect_poly = Polygon('incorrect_poly', points=[Point(*coords) for coords in incorrect_points]) domain =", "assert all([np.isclose(len(observed_structure.polygons), 2), 'like_fixed' not in [poly.id for poly in", "incorrect_points = [(5, 5), (5, poly_length), (8, poly_length), (5, 5),", "poly_length+40), (-poly_width-10, poly_length+40), (-poly_width-10, 40)] out_bounds_rectangle_poly = Polygon('rectangle', points=[Point(*coords) for", "unclosed_triangle_poly]) observed_structure = postprocess(input_structure, domain) assert all([np.isclose(len(observed_structure.polygons), 2), 'like_fixed' not", "in [poly.id for poly in observed_structure.polygons], 'fixed' in [poly.id for", "assert not self_intersection(observed_structure) def test_out_of_bound(): input_structure = Structure([out_bounds_rectangle_poly]) observed_structure =", "input_structure = Structure([incorrect_poly]) observed_structure = postprocess(input_structure, domain) assert self_intersection(input_structure) assert", 
"triangle_points = [(1, 1), (poly_width, poly_length), (1, poly_length)] unclosed_triangle_poly =", "deepcopy(unclosed_triangle_poly) same_poly.id = 'same_triangle' input_structure = Structure([unclosed_triangle_poly, same_poly]) observed_structure =", "gefest.core.algs.postproc.resolve_errors import * from gefest.core.algs.geom.validation import * # marking length", "poly_like_fixed = Polygon('like_fixed', points=[Point(15, 30), Point(40, 30), Point(15, 40)]) input_structure", "= Domain() def test_unclosed_poly(): input_structure = Structure([unclosed_triangle_poly]) observed_structure = postprocess(input_structure,", "Point(40, 30), Point(15, 40)]) input_structure = Structure([poly_like_fixed, unclosed_triangle_poly]) observed_structure =", "= Structure([incorrect_poly]) observed_structure = postprocess(input_structure, domain) assert self_intersection(input_structure) assert not", "Structure from gefest.core.algs.postproc.resolve_errors import * from gefest.core.algs.geom.validation import * #", "observed_structure = postprocess(input_structure, domain) assert out_of_bound(input_structure, domain) assert not out_of_bound(observed_structure,", "* # marking length and width for testing polygon poly_width", "30)] incorrect_poly = Polygon('incorrect_poly', points=[Point(*coords) for coords in incorrect_points]) domain", "domain) def test_self_intersection(): input_structure = Structure([incorrect_poly]) observed_structure = postprocess(input_structure, domain)", "width for testing polygon poly_width = 10 poly_length = 20", "[(-1, 40), (-1, poly_length+40), (-poly_width-10, poly_length+40), (-poly_width-10, 40)] out_bounds_rectangle_poly =", "30), Point(15, 40)]) input_structure = Structure([poly_like_fixed, unclosed_triangle_poly]) observed_structure = postprocess(input_structure,", "domain = Domain() def test_unclosed_poly(): input_structure = Structure([unclosed_triangle_poly]) observed_structure =", "gefest.core.structure.polygon import Polygon from 
gefest.core.structure.structure import Structure from gefest.core.algs.postproc.resolve_errors import", "input_structure = Structure([out_bounds_rectangle_poly]) observed_structure = postprocess(input_structure, domain) assert out_of_bound(input_structure, domain)", "for poly in observed_structure.polygons]]) def test_too_close(): same_poly = deepcopy(unclosed_triangle_poly) same_poly.id", "Polygon('incorrect_poly', points=[Point(*coords) for coords in incorrect_points]) domain = Domain() def", "# marking length and width for testing polygon poly_width =", "length and width for testing polygon poly_width = 10 poly_length", "= postprocess(input_structure, domain) assert all([np.isclose(len(observed_structure.polygons), 2), 'like_fixed' not in [poly.id", "= Structure([poly_like_fixed, unclosed_triangle_poly]) observed_structure = postprocess(input_structure, domain) assert all([np.isclose(len(observed_structure.polygons), 2),", "(-poly_width-10, poly_length+40), (-poly_width-10, 40)] out_bounds_rectangle_poly = Polygon('rectangle', points=[Point(*coords) for coords", "for poly in observed_structure.polygons], 'fixed' in [poly.id for poly in", "Structure([unclosed_triangle_poly]) observed_structure = postprocess(input_structure, domain) assert unclosed_poly(input_structure, domain) assert not", "Polygon from gefest.core.structure.structure import Structure from gefest.core.algs.postproc.resolve_errors import * from", "poly in observed_structure.polygons]]) def test_too_close(): same_poly = deepcopy(unclosed_triangle_poly) same_poly.id =", "Domain(fixed_points=[[[15, 30], [40, 30], [15, 40]]]) poly_like_fixed = Polygon('like_fixed', points=[Point(15,", "domain) assert not out_of_bound(observed_structure, domain) def test_fixed_polys(): domain = Domain(fixed_points=[[[15,", "assert out_of_bound(input_structure, domain) assert not out_of_bound(observed_structure, domain) def test_fixed_polys(): domain", "in triangle_points]) incorrect_points = [(5, 5), (5, poly_length), 
(8, poly_length),", "not self_intersection(observed_structure) def test_out_of_bound(): input_structure = Structure([out_bounds_rectangle_poly]) observed_structure = postprocess(input_structure,", "postprocess(input_structure, domain) assert self_intersection(input_structure) assert not self_intersection(observed_structure) def test_out_of_bound(): input_structure", "(-1, poly_length+40), (-poly_width-10, poly_length+40), (-poly_width-10, 40)] out_bounds_rectangle_poly = Polygon('rectangle', points=[Point(*coords)", "def test_out_of_bound(): input_structure = Structure([out_bounds_rectangle_poly]) observed_structure = postprocess(input_structure, domain) assert", "(5, poly_length), (8, poly_length), (5, 5), (5, 30)] incorrect_poly =", "= postprocess(input_structure, domain) assert out_of_bound(input_structure, domain) assert not out_of_bound(observed_structure, domain)", "out_of_bound(input_structure, domain) assert not out_of_bound(observed_structure, domain) def test_fixed_polys(): domain =", "domain = Domain(fixed_points=[[[15, 30], [40, 30], [15, 40]]]) poly_like_fixed =", "unclosed_poly(observed_structure, domain) def test_self_intersection(): input_structure = Structure([incorrect_poly]) observed_structure = postprocess(input_structure,", "(1, poly_length)] unclosed_triangle_poly = Polygon('triangle', points=[Point(*coords) for coords in triangle_points])", "30], [15, 40]]]) poly_like_fixed = Polygon('like_fixed', points=[Point(15, 30), Point(40, 30),", "= Polygon('like_fixed', points=[Point(15, 30), Point(40, 30), Point(15, 40)]) input_structure =", "= [(-1, 40), (-1, poly_length+40), (-poly_width-10, poly_length+40), (-poly_width-10, 40)] out_bounds_rectangle_poly", "= deepcopy(unclosed_triangle_poly) same_poly.id = 'same_triangle' input_structure = Structure([unclosed_triangle_poly, same_poly]) observed_structure", "triangle_points]) incorrect_points = [(5, 5), (5, poly_length), (8, poly_length), (5,", "for coords in incorrect_points]) domain = Domain() 
def test_unclosed_poly(): input_structure", "Polygon('like_fixed', points=[Point(15, 30), Point(40, 30), Point(15, 40)]) input_structure = Structure([poly_like_fixed,", "[40, 30], [15, 40]]]) poly_like_fixed = Polygon('like_fixed', points=[Point(15, 30), Point(40,", "= [(1, 1), (poly_width, poly_length), (1, poly_length)] unclosed_triangle_poly = Polygon('triangle',", "40)] out_bounds_rectangle_poly = Polygon('rectangle', points=[Point(*coords) for coords in rectangle_points]) triangle_points", "same_poly = deepcopy(unclosed_triangle_poly) same_poly.id = 'same_triangle' input_structure = Structure([unclosed_triangle_poly, same_poly])", "out_of_bound(observed_structure, domain) def test_fixed_polys(): domain = Domain(fixed_points=[[[15, 30], [40, 30],", "def test_unclosed_poly(): input_structure = Structure([unclosed_triangle_poly]) observed_structure = postprocess(input_structure, domain) assert", "= 'same_triangle' input_structure = Structure([unclosed_triangle_poly, same_poly]) observed_structure = postprocess(input_structure, domain)", "testing polygon poly_width = 10 poly_length = 20 # creating", "observed_structure = postprocess(input_structure, domain) assert self_intersection(input_structure) assert not self_intersection(observed_structure) def", "1), (poly_width, poly_length), (1, poly_length)] unclosed_triangle_poly = Polygon('triangle', points=[Point(*coords) for", "for coords in rectangle_points]) triangle_points = [(1, 1), (poly_width, poly_length),", "not unclosed_poly(observed_structure, domain) def test_self_intersection(): input_structure = Structure([incorrect_poly]) observed_structure =", "self_intersection(input_structure) assert not self_intersection(observed_structure) def test_out_of_bound(): input_structure = Structure([out_bounds_rectangle_poly]) observed_structure", "not out_of_bound(observed_structure, domain) def test_fixed_polys(): domain = Domain(fixed_points=[[[15, 30], [40,", "poly_length), (8, poly_length), (5, 5), (5, 30)] 
incorrect_poly = Polygon('incorrect_poly',", "import Structure from gefest.core.algs.postproc.resolve_errors import * from gefest.core.algs.geom.validation import *", "Structure([incorrect_poly]) observed_structure = postprocess(input_structure, domain) assert self_intersection(input_structure) assert not self_intersection(observed_structure)", "testing polygons via corner points rectangle_points = [(-1, 40), (-1,", "same_poly.id = 'same_triangle' input_structure = Structure([unclosed_triangle_poly, same_poly]) observed_structure = postprocess(input_structure,", "test_unclosed_poly(): input_structure = Structure([unclosed_triangle_poly]) observed_structure = postprocess(input_structure, domain) assert unclosed_poly(input_structure,", "* from gefest.core.algs.geom.validation import * # marking length and width", "a testing polygons via corner points rectangle_points = [(-1, 40),", "points=[Point(*coords) for coords in triangle_points]) incorrect_points = [(5, 5), (5,", "gefest.core.algs.geom.validation import * # marking length and width for testing", "(poly_width, poly_length), (1, poly_length)] unclosed_triangle_poly = Polygon('triangle', points=[Point(*coords) for coords", "Polygon('rectangle', points=[Point(*coords) for coords in rectangle_points]) triangle_points = [(1, 1),", "polygons via corner points rectangle_points = [(-1, 40), (-1, poly_length+40),", "import deepcopy from gefest.core.structure.point import Point from gefest.core.structure.polygon import Polygon", "def test_too_close(): same_poly = deepcopy(unclosed_triangle_poly) same_poly.id = 'same_triangle' input_structure =", "observed_structure = postprocess(input_structure, domain) assert all([np.isclose(len(observed_structure.polygons), 2), 'like_fixed' not in", "points=[Point(*coords) for coords in incorrect_points]) domain = Domain() def test_unclosed_poly():", "= 10 poly_length = 20 # creating a testing polygons", "observed_structure.polygons], 'fixed' in [poly.id for poly in 
observed_structure.polygons]]) def test_too_close():", "= Structure([unclosed_triangle_poly, same_poly]) observed_structure = postprocess(input_structure, domain) print(observed_structure.polygons) assert np.isclose(len(observed_structure.polygons),", "test_self_intersection(): input_structure = Structure([incorrect_poly]) observed_structure = postprocess(input_structure, domain) assert self_intersection(input_structure)", "test_fixed_polys(): domain = Domain(fixed_points=[[[15, 30], [40, 30], [15, 40]]]) poly_like_fixed", "coords in incorrect_points]) domain = Domain() def test_unclosed_poly(): input_structure =", "poly_length+40), (-poly_width-10, 40)] out_bounds_rectangle_poly = Polygon('rectangle', points=[Point(*coords) for coords in", "coords in rectangle_points]) triangle_points = [(1, 1), (poly_width, poly_length), (1,", "Structure([poly_like_fixed, unclosed_triangle_poly]) observed_structure = postprocess(input_structure, domain) assert all([np.isclose(len(observed_structure.polygons), 2), 'like_fixed'", "2), 'like_fixed' not in [poly.id for poly in observed_structure.polygons], 'fixed'", "(5, 5), (5, 30)] incorrect_poly = Polygon('incorrect_poly', points=[Point(*coords) for coords", "(-poly_width-10, 40)] out_bounds_rectangle_poly = Polygon('rectangle', points=[Point(*coords) for coords in rectangle_points])", "= Polygon('incorrect_poly', points=[Point(*coords) for coords in incorrect_points]) domain = Domain()", "= Domain(fixed_points=[[[15, 30], [40, 30], [15, 40]]]) poly_like_fixed = Polygon('like_fixed',", "points rectangle_points = [(-1, 40), (-1, poly_length+40), (-poly_width-10, poly_length+40), (-poly_width-10,", "assert not unclosed_poly(observed_structure, domain) def test_self_intersection(): input_structure = Structure([incorrect_poly]) observed_structure", "Structure([out_bounds_rectangle_poly]) observed_structure = postprocess(input_structure, domain) assert out_of_bound(input_structure, domain) assert not", "poly in 
observed_structure.polygons], 'fixed' in [poly.id for poly in observed_structure.polygons]])", "observed_structure = postprocess(input_structure, domain) assert unclosed_poly(input_structure, domain) assert not unclosed_poly(observed_structure,", "[poly.id for poly in observed_structure.polygons]]) def test_too_close(): same_poly = deepcopy(unclosed_triangle_poly)", "# creating a testing polygons via corner points rectangle_points =", "5), (5, poly_length), (8, poly_length), (5, 5), (5, 30)] incorrect_poly", "Structure([unclosed_triangle_poly, same_poly]) observed_structure = postprocess(input_structure, domain) print(observed_structure.polygons) assert np.isclose(len(observed_structure.polygons), 1)", "and width for testing polygon poly_width = 10 poly_length =", "rectangle_points]) triangle_points = [(1, 1), (poly_width, poly_length), (1, poly_length)] unclosed_triangle_poly", "= postprocess(input_structure, domain) assert self_intersection(input_structure) assert not self_intersection(observed_structure) def test_out_of_bound():", "coords in triangle_points]) incorrect_points = [(5, 5), (5, poly_length), (8,", "20 # creating a testing polygons via corner points rectangle_points", "= Structure([unclosed_triangle_poly]) observed_structure = postprocess(input_structure, domain) assert unclosed_poly(input_structure, domain) assert", "def test_fixed_polys(): domain = Domain(fixed_points=[[[15, 30], [40, 30], [15, 40]]])", "[15, 40]]]) poly_like_fixed = Polygon('like_fixed', points=[Point(15, 30), Point(40, 30), Point(15,", "from copy import deepcopy from gefest.core.structure.point import Point from gefest.core.structure.polygon", "import pytest from copy import deepcopy from gefest.core.structure.point import Point", "domain) def test_fixed_polys(): domain = Domain(fixed_points=[[[15, 30], [40, 30], [15,", "'fixed' in [poly.id for poly in observed_structure.polygons]]) def test_too_close(): same_poly", "5), (5, 30)] incorrect_poly = Polygon('incorrect_poly', 
points=[Point(*coords) for coords in", "40), (-1, poly_length+40), (-poly_width-10, poly_length+40), (-poly_width-10, 40)] out_bounds_rectangle_poly = Polygon('rectangle',", "[(5, 5), (5, poly_length), (8, poly_length), (5, 5), (5, 30)]", "postprocess(input_structure, domain) assert unclosed_poly(input_structure, domain) assert not unclosed_poly(observed_structure, domain) def", "input_structure = Structure([unclosed_triangle_poly]) observed_structure = postprocess(input_structure, domain) assert unclosed_poly(input_structure, domain)", "unclosed_triangle_poly = Polygon('triangle', points=[Point(*coords) for coords in triangle_points]) incorrect_points =", "'like_fixed' not in [poly.id for poly in observed_structure.polygons], 'fixed' in" ]
[ "in order to ease the creation of mock instances. \"\"\"", "nodes=iter(()), **kwargs): Mock.__init__(self, *args, **kwargs) self.focused = focused self.layout =", "tree async def get_tree(self): \"\"\"Return the i3 tree asynchronously\"\"\" return", "def find_focused(self): \"\"\"Return the focused window\"\"\" return self def workspace(self):", "to ease testing of i3ipc.aio.Con serialization methods, which are mokey", "workspace(self): \"\"\"Return the containing workspace\"\"\" return self class MockI3(Mock): \"\"\"Mock", "this, the mock inherits all the method implementations of i3ipc.aio.Con,", "properties, by returning `self` when an i3ipc.aio.Con instance is needed", "of simplicity. \"\"\" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def", "containing workspace\"\"\" return self class MockI3(Mock): \"\"\"Mock an i3ipc.aio.Connection\"\"\" def", "the sake of simplicity. \"\"\" def __init__(self, *args, **kwargs): super().__init__(*args,", "**kwargs): Mock.__init__(self, *args, **kwargs) self.focused = focused self.layout = layout", "the constructor, in order to ease the creation of mock", "\"\"\" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def find_focused(self): \"\"\"Return", "injected in the constructor, in order to ease the creation", "\"\"\"Mock an i3ipc.aio.Con for navigation purposes This Mock is meant", "class MockConSerializer(Mock, Con): \"\"\"Mock a generic i3ipc.aio.Con for serialization purposes", "for serialization, both properties and methods, is mocked and can", "returning `self` when an i3ipc.aio.Con instance is needed for the", "is needed for serialization, both properties and methods, is mocked", "self.focused = focused self.layout = layout self.name = name self.nodes", "Con): \"\"\"Mock a generic i3ipc.aio.Con for serialization purposes This Mock", "can be injected in the constructor, in order to ease", "mock instances. 
\"\"\" def __init__(self, *args, name=None, layout=None, focused=False, nodes=iter(()),", "focused self.layout = layout self.name = name self.nodes = nodes", "This Mock is meant to be used when testing i3ipc", "whatever is needed for serialization, both properties and methods, is", "purposes This Mock is meant to be used when testing", "methods and properties, by returning `self` when an i3ipc.aio.Con instance", "event handlers. It mocks all the necessary methods and properties,", "navigation purposes This Mock is meant to be used when", "instance is needed for the sake of simplicity. \"\"\" def", "*args, **kwargs): super().__init__(*args, **kwargs) def find_focused(self): \"\"\"Return the focused window\"\"\"", "MockConSerializer(Mock, Con): \"\"\"Mock a generic i3ipc.aio.Con for serialization purposes This", "needed for the sake of simplicity. \"\"\" def __init__(self, *args,", "methods, is mocked and can be injected in the constructor,", "of mock instances. \"\"\" def __init__(self, *args, name=None, layout=None, focused=False,", "from i3ipc.aio import Con import i3_live_tree.tree_serializer # noqa: F401 class", "super().__init__(*args, **kwargs) def find_focused(self): \"\"\"Return the focused window\"\"\" return self", "both properties and methods, is mocked and can be injected", "i3ipc.aio.Connection\"\"\" def __init__(self, *args, tree, **kwargs): super().__init__(*args, **kwargs) self.tree =", "import Con import i3_live_tree.tree_serializer # noqa: F401 class MockConSerializer(Mock, Con):", "class MockConNavigation(MagicMock): \"\"\"Mock an i3ipc.aio.Con for navigation purposes This Mock", "However, whatever is needed for serialization, both properties and methods,", "importantly the serialization ones. 
However, whatever is needed for serialization,", "nodes class MockConNavigation(MagicMock): \"\"\"Mock an i3ipc.aio.Con for navigation purposes This", "of i3ipc.aio.Con serialization methods, which are mokey patched in i3_live_tree.tree_serializer.", "an i3ipc.aio.Con instance is needed for the sake of simplicity.", "i3ipc.aio.Con serialization methods, which are mokey patched in i3_live_tree.tree_serializer. In", "is mocked and can be injected in the constructor, in", "layout self.name = name self.nodes = nodes class MockConNavigation(MagicMock): \"\"\"Mock", "for navigation purposes This Mock is meant to be used", "when testing i3ipc event handlers. It mocks all the necessary", "= nodes class MockConNavigation(MagicMock): \"\"\"Mock an i3ipc.aio.Con for navigation purposes", "which are mokey patched in i3_live_tree.tree_serializer. In order to achieve", "*args, name=None, layout=None, focused=False, nodes=iter(()), **kwargs): Mock.__init__(self, *args, **kwargs) self.focused", "return self class MockI3(Mock): \"\"\"Mock an i3ipc.aio.Connection\"\"\" def __init__(self, *args,", "a generic i3ipc.aio.Con for serialization purposes This Mock is meant", "It mocks all the necessary methods and properties, by returning", "the mock inherits all the method implementations of i3ipc.aio.Con, most", "**kwargs): super().__init__(*args, **kwargs) self.tree = tree async def get_tree(self): \"\"\"Return", "MagicMock, Mock from i3ipc.aio import Con import i3_live_tree.tree_serializer # noqa:", "and methods, is mocked and can be injected in the", "*args, **kwargs) self.focused = focused self.layout = layout self.name =", "async def get_tree(self): \"\"\"Return the i3 tree asynchronously\"\"\" return self.tree", "`self` when an i3ipc.aio.Con instance is needed for the sake", "method implementations of i3ipc.aio.Con, most importantly the serialization ones. 
However,", "**kwargs) self.tree = tree async def get_tree(self): \"\"\"Return the i3", "In order to achieve this, the mock inherits all the", "focused window\"\"\" return self def workspace(self): \"\"\"Return the containing workspace\"\"\"", "workspace\"\"\" return self class MockI3(Mock): \"\"\"Mock an i3ipc.aio.Connection\"\"\" def __init__(self,", "order to achieve this, the mock inherits all the method", "= tree async def get_tree(self): \"\"\"Return the i3 tree asynchronously\"\"\"", "i3ipc.aio import Con import i3_live_tree.tree_serializer # noqa: F401 class MockConSerializer(Mock,", "__init__(self, *args, tree, **kwargs): super().__init__(*args, **kwargs) self.tree = tree async", "layout=None, focused=False, nodes=iter(()), **kwargs): Mock.__init__(self, *args, **kwargs) self.focused = focused", "i3ipc.aio.Con, most importantly the serialization ones. However, whatever is needed", "generic i3ipc.aio.Con for serialization purposes This Mock is meant to", "MockI3(Mock): \"\"\"Mock an i3ipc.aio.Connection\"\"\" def __init__(self, *args, tree, **kwargs): super().__init__(*args,", "the necessary methods and properties, by returning `self` when an", "Mock is meant to be used when testing i3ipc event", "def __init__(self, *args, name=None, layout=None, focused=False, nodes=iter(()), **kwargs): Mock.__init__(self, *args,", "necessary methods and properties, by returning `self` when an i3ipc.aio.Con", "simplicity. 
\"\"\" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def find_focused(self):", "class MockI3(Mock): \"\"\"Mock an i3ipc.aio.Connection\"\"\" def __init__(self, *args, tree, **kwargs):", "return self def workspace(self): \"\"\"Return the containing workspace\"\"\" return self", "be injected in the constructor, in order to ease the", "constructor, in order to ease the creation of mock instances.", "is meant to be used when testing i3ipc event handlers.", "and can be injected in the constructor, in order to", "mocked and can be injected in the constructor, in order", "**kwargs) self.focused = focused self.layout = layout self.name = name", "super().__init__(*args, **kwargs) self.tree = tree async def get_tree(self): \"\"\"Return the", "\"\"\" def __init__(self, *args, name=None, layout=None, focused=False, nodes=iter(()), **kwargs): Mock.__init__(self,", "is needed for the sake of simplicity. \"\"\" def __init__(self,", "most importantly the serialization ones. However, whatever is needed for", "\"\"\"Return the containing workspace\"\"\" return self class MockI3(Mock): \"\"\"Mock an", "testing of i3ipc.aio.Con serialization methods, which are mokey patched in", "to achieve this, the mock inherits all the method implementations", "i3ipc.aio.Con for serialization purposes This Mock is meant to ease", "unittest.mock import MagicMock, Mock from i3ipc.aio import Con import i3_live_tree.tree_serializer", "inherits all the method implementations of i3ipc.aio.Con, most importantly the", "the focused window\"\"\" return self def workspace(self): \"\"\"Return the containing", "# noqa: F401 class MockConSerializer(Mock, Con): \"\"\"Mock a generic i3ipc.aio.Con", "implementations of i3ipc.aio.Con, most importantly the serialization ones. However, whatever", "self.tree = tree async def get_tree(self): \"\"\"Return the i3 tree", "methods, which are mokey patched in i3_live_tree.tree_serializer. 
In order to", "serialization methods, which are mokey patched in i3_live_tree.tree_serializer. In order", "import MagicMock, Mock from i3ipc.aio import Con import i3_live_tree.tree_serializer #", "= layout self.name = name self.nodes = nodes class MockConNavigation(MagicMock):", "self.nodes = nodes class MockConNavigation(MagicMock): \"\"\"Mock an i3ipc.aio.Con for navigation", "**kwargs) def find_focused(self): \"\"\"Return the focused window\"\"\" return self def", "testing i3ipc event handlers. It mocks all the necessary methods", "= focused self.layout = layout self.name = name self.nodes =", "**kwargs): super().__init__(*args, **kwargs) def find_focused(self): \"\"\"Return the focused window\"\"\" return", "and properties, by returning `self` when an i3ipc.aio.Con instance is", "\"\"\"Return the focused window\"\"\" return self def workspace(self): \"\"\"Return the", "used when testing i3ipc event handlers. It mocks all the", "Mock.__init__(self, *args, **kwargs) self.focused = focused self.layout = layout self.name", "serialization, both properties and methods, is mocked and can be", "are mokey patched in i3_live_tree.tree_serializer. In order to achieve this,", "mokey patched in i3_live_tree.tree_serializer. In order to achieve this, the", "patched in i3_live_tree.tree_serializer. In order to achieve this, the mock", "MockConNavigation(MagicMock): \"\"\"Mock an i3ipc.aio.Con for navigation purposes This Mock is", "meant to ease testing of i3ipc.aio.Con serialization methods, which are", "__init__(self, *args, name=None, layout=None, focused=False, nodes=iter(()), **kwargs): Mock.__init__(self, *args, **kwargs)", "mocks all the necessary methods and properties, by returning `self`", "ease the creation of mock instances. 
\"\"\" def __init__(self, *args,", "\"\"\"Mock an i3ipc.aio.Connection\"\"\" def __init__(self, *args, tree, **kwargs): super().__init__(*args, **kwargs)", "tree, **kwargs): super().__init__(*args, **kwargs) self.tree = tree async def get_tree(self):", "\"\"\"Mock a generic i3ipc.aio.Con for serialization purposes This Mock is", "def __init__(self, *args, tree, **kwargs): super().__init__(*args, **kwargs) self.tree = tree", "to ease the creation of mock instances. \"\"\" def __init__(self,", "= name self.nodes = nodes class MockConNavigation(MagicMock): \"\"\"Mock an i3ipc.aio.Con", "in the constructor, in order to ease the creation of", "self.layout = layout self.name = name self.nodes = nodes class", "This Mock is meant to ease testing of i3ipc.aio.Con serialization", "the method implementations of i3ipc.aio.Con, most importantly the serialization ones.", "of i3ipc.aio.Con, most importantly the serialization ones. However, whatever is", "the serialization ones. However, whatever is needed for serialization, both", "def workspace(self): \"\"\"Return the containing workspace\"\"\" return self class MockI3(Mock):", "import i3_live_tree.tree_serializer # noqa: F401 class MockConSerializer(Mock, Con): \"\"\"Mock a", "ease testing of i3ipc.aio.Con serialization methods, which are mokey patched", "__init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def find_focused(self): \"\"\"Return the focused", "Mock is meant to ease testing of i3ipc.aio.Con serialization methods,", "i3_live_tree.tree_serializer. In order to achieve this, the mock inherits all", "noqa: F401 class MockConSerializer(Mock, Con): \"\"\"Mock a generic i3ipc.aio.Con for", "creation of mock instances. 
\"\"\" def __init__(self, *args, name=None, layout=None,", "serialization purposes This Mock is meant to ease testing of", "achieve this, the mock inherits all the method implementations of", "mock inherits all the method implementations of i3ipc.aio.Con, most importantly", "F401 class MockConSerializer(Mock, Con): \"\"\"Mock a generic i3ipc.aio.Con for serialization", "meant to be used when testing i3ipc event handlers. It", "*args, tree, **kwargs): super().__init__(*args, **kwargs) self.tree = tree async def", "i3ipc.aio.Con for navigation purposes This Mock is meant to be", "all the necessary methods and properties, by returning `self` when", "handlers. It mocks all the necessary methods and properties, by", "self.name = name self.nodes = nodes class MockConNavigation(MagicMock): \"\"\"Mock an", "to be used when testing i3ipc event handlers. It mocks", "Con import i3_live_tree.tree_serializer # noqa: F401 class MockConSerializer(Mock, Con): \"\"\"Mock", "from unittest.mock import MagicMock, Mock from i3ipc.aio import Con import", "name=None, layout=None, focused=False, nodes=iter(()), **kwargs): Mock.__init__(self, *args, **kwargs) self.focused =", "self class MockI3(Mock): \"\"\"Mock an i3ipc.aio.Connection\"\"\" def __init__(self, *args, tree,", "ones. However, whatever is needed for serialization, both properties and", "purposes This Mock is meant to ease testing of i3ipc.aio.Con", "focused=False, nodes=iter(()), **kwargs): Mock.__init__(self, *args, **kwargs) self.focused = focused self.layout", "window\"\"\" return self def workspace(self): \"\"\"Return the containing workspace\"\"\" return", "when an i3ipc.aio.Con instance is needed for the sake of", "the creation of mock instances. 
\"\"\" def __init__(self, *args, name=None,", "the containing workspace\"\"\" return self class MockI3(Mock): \"\"\"Mock an i3ipc.aio.Connection\"\"\"", "all the method implementations of i3ipc.aio.Con, most importantly the serialization", "is meant to ease testing of i3ipc.aio.Con serialization methods, which", "be used when testing i3ipc event handlers. It mocks all", "serialization ones. However, whatever is needed for serialization, both properties", "for serialization purposes This Mock is meant to ease testing", "by returning `self` when an i3ipc.aio.Con instance is needed for", "i3ipc event handlers. It mocks all the necessary methods and", "in i3_live_tree.tree_serializer. In order to achieve this, the mock inherits", "def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def find_focused(self): \"\"\"Return the", "sake of simplicity. \"\"\" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs)", "properties and methods, is mocked and can be injected in", "instances. \"\"\" def __init__(self, *args, name=None, layout=None, focused=False, nodes=iter(()), **kwargs):", "an i3ipc.aio.Con for navigation purposes This Mock is meant to", "needed for serialization, both properties and methods, is mocked and", "Mock from i3ipc.aio import Con import i3_live_tree.tree_serializer # noqa: F401", "order to ease the creation of mock instances. \"\"\" def", "an i3ipc.aio.Connection\"\"\" def __init__(self, *args, tree, **kwargs): super().__init__(*args, **kwargs) self.tree", "for the sake of simplicity. \"\"\" def __init__(self, *args, **kwargs):", "i3ipc.aio.Con instance is needed for the sake of simplicity. 
\"\"\"", "find_focused(self): \"\"\"Return the focused window\"\"\" return self def workspace(self): \"\"\"Return", "self def workspace(self): \"\"\"Return the containing workspace\"\"\" return self class", "name self.nodes = nodes class MockConNavigation(MagicMock): \"\"\"Mock an i3ipc.aio.Con for", "i3_live_tree.tree_serializer # noqa: F401 class MockConSerializer(Mock, Con): \"\"\"Mock a generic" ]
[ "mount_point=DEFAULT_MOUNT_POINT): if endpoint not in ALLOWED_CREDS_ENDPOINTS: error_msg = 'invalid endpoint", "'/v1/{mount_point}/{endpoint}/{roleset}'.format( mount_point=mount_point, endpoint=endpoint, roleset=roleset, ) response = self._adapter.get( url=api_path )", "provided \"{arg}\", supported types: \"{allowed_endpoints}\"' raise exceptions.ParamValidationError(error_msg.format( arg=endpoint, allowed_endpoints=', '.join(ALLOWED_CREDS_ENDPOINTS),", "Gcp(VaultApiBase): def generate_credentials(self, roleset, endpoint='key', mount_point=DEFAULT_MOUNT_POINT): if endpoint not in", "VaultApiBase from hvac.constants.gcp import DEFAULT_MOUNT_POINT, ALLOWED_CREDS_ENDPOINTS class Gcp(VaultApiBase): def generate_credentials(self,", "supported types: \"{allowed_endpoints}\"' raise exceptions.ParamValidationError(error_msg.format( arg=endpoint, allowed_endpoints=', '.join(ALLOWED_CREDS_ENDPOINTS), )) api_path", "hvac.constants.gcp import DEFAULT_MOUNT_POINT, ALLOWED_CREDS_ENDPOINTS class Gcp(VaultApiBase): def generate_credentials(self, roleset, endpoint='key',", "import DEFAULT_MOUNT_POINT, ALLOWED_CREDS_ENDPOINTS class Gcp(VaultApiBase): def generate_credentials(self, roleset, endpoint='key', mount_point=DEFAULT_MOUNT_POINT):", "from hvac import exceptions from hvac.api.vault_api_base import VaultApiBase from hvac.constants.gcp", "generate_credentials(self, roleset, endpoint='key', mount_point=DEFAULT_MOUNT_POINT): if endpoint not in ALLOWED_CREDS_ENDPOINTS: error_msg", "types: \"{allowed_endpoints}\"' raise exceptions.ParamValidationError(error_msg.format( arg=endpoint, allowed_endpoints=', '.join(ALLOWED_CREDS_ENDPOINTS), )) api_path =", "exceptions.ParamValidationError(error_msg.format( arg=endpoint, allowed_endpoints=', '.join(ALLOWED_CREDS_ENDPOINTS), )) api_path = '/v1/{mount_point}/{endpoint}/{roleset}'.format( mount_point=mount_point, endpoint=endpoint,", "ALLOWED_CREDS_ENDPOINTS class Gcp(VaultApiBase): def generate_credentials(self, roleset, 
endpoint='key', mount_point=DEFAULT_MOUNT_POINT): if endpoint", "utf-8 -*- \"\"\"Gcp methods module.\"\"\" from hvac import exceptions from", "from hvac.constants.gcp import DEFAULT_MOUNT_POINT, ALLOWED_CREDS_ENDPOINTS class Gcp(VaultApiBase): def generate_credentials(self, roleset,", "hvac.api.vault_api_base import VaultApiBase from hvac.constants.gcp import DEFAULT_MOUNT_POINT, ALLOWED_CREDS_ENDPOINTS class Gcp(VaultApiBase):", "ALLOWED_CREDS_ENDPOINTS: error_msg = 'invalid endpoint argument provided \"{arg}\", supported types:", "coding: utf-8 -*- \"\"\"Gcp methods module.\"\"\" from hvac import exceptions", "hvac import exceptions from hvac.api.vault_api_base import VaultApiBase from hvac.constants.gcp import", "# -*- coding: utf-8 -*- \"\"\"Gcp methods module.\"\"\" from hvac", "in ALLOWED_CREDS_ENDPOINTS: error_msg = 'invalid endpoint argument provided \"{arg}\", supported", "import VaultApiBase from hvac.constants.gcp import DEFAULT_MOUNT_POINT, ALLOWED_CREDS_ENDPOINTS class Gcp(VaultApiBase): def", "def generate_credentials(self, roleset, endpoint='key', mount_point=DEFAULT_MOUNT_POINT): if endpoint not in ALLOWED_CREDS_ENDPOINTS:", "error_msg = 'invalid endpoint argument provided \"{arg}\", supported types: \"{allowed_endpoints}\"'", "exceptions from hvac.api.vault_api_base import VaultApiBase from hvac.constants.gcp import DEFAULT_MOUNT_POINT, ALLOWED_CREDS_ENDPOINTS", "DEFAULT_MOUNT_POINT, ALLOWED_CREDS_ENDPOINTS class Gcp(VaultApiBase): def generate_credentials(self, roleset, endpoint='key', mount_point=DEFAULT_MOUNT_POINT): if", "class Gcp(VaultApiBase): def generate_credentials(self, roleset, endpoint='key', mount_point=DEFAULT_MOUNT_POINT): if endpoint not", "-*- coding: utf-8 -*- \"\"\"Gcp methods module.\"\"\" from hvac import", "argument provided \"{arg}\", supported types: \"{allowed_endpoints}\"' raise exceptions.ParamValidationError(error_msg.format( arg=endpoint, allowed_endpoints=',", "\"{arg}\", supported types: \"{allowed_endpoints}\"' 
raise exceptions.ParamValidationError(error_msg.format( arg=endpoint, allowed_endpoints=', '.join(ALLOWED_CREDS_ENDPOINTS), ))", "#!/usr/bin/env python # -*- coding: utf-8 -*- \"\"\"Gcp methods module.\"\"\"", "if endpoint not in ALLOWED_CREDS_ENDPOINTS: error_msg = 'invalid endpoint argument", "module.\"\"\" from hvac import exceptions from hvac.api.vault_api_base import VaultApiBase from", "import exceptions from hvac.api.vault_api_base import VaultApiBase from hvac.constants.gcp import DEFAULT_MOUNT_POINT,", "= '/v1/{mount_point}/{endpoint}/{roleset}'.format( mount_point=mount_point, endpoint=endpoint, roleset=roleset, ) response = self._adapter.get( url=api_path", "mount_point=mount_point, endpoint=endpoint, roleset=roleset, ) response = self._adapter.get( url=api_path ) return", "endpoint argument provided \"{arg}\", supported types: \"{allowed_endpoints}\"' raise exceptions.ParamValidationError(error_msg.format( arg=endpoint,", "\"\"\"Gcp methods module.\"\"\" from hvac import exceptions from hvac.api.vault_api_base import", "endpoint not in ALLOWED_CREDS_ENDPOINTS: error_msg = 'invalid endpoint argument provided", "roleset, endpoint='key', mount_point=DEFAULT_MOUNT_POINT): if endpoint not in ALLOWED_CREDS_ENDPOINTS: error_msg =", "endpoint='key', mount_point=DEFAULT_MOUNT_POINT): if endpoint not in ALLOWED_CREDS_ENDPOINTS: error_msg = 'invalid", "= 'invalid endpoint argument provided \"{arg}\", supported types: \"{allowed_endpoints}\"' raise", ")) api_path = '/v1/{mount_point}/{endpoint}/{roleset}'.format( mount_point=mount_point, endpoint=endpoint, roleset=roleset, ) response =", "\"{allowed_endpoints}\"' raise exceptions.ParamValidationError(error_msg.format( arg=endpoint, allowed_endpoints=', '.join(ALLOWED_CREDS_ENDPOINTS), )) api_path = '/v1/{mount_point}/{endpoint}/{roleset}'.format(", "endpoint=endpoint, roleset=roleset, ) response = self._adapter.get( url=api_path ) return response.json()", "python # -*- coding: utf-8 -*- \"\"\"Gcp methods 
module.\"\"\" from", "-*- \"\"\"Gcp methods module.\"\"\" from hvac import exceptions from hvac.api.vault_api_base", "raise exceptions.ParamValidationError(error_msg.format( arg=endpoint, allowed_endpoints=', '.join(ALLOWED_CREDS_ENDPOINTS), )) api_path = '/v1/{mount_point}/{endpoint}/{roleset}'.format( mount_point=mount_point,", "allowed_endpoints=', '.join(ALLOWED_CREDS_ENDPOINTS), )) api_path = '/v1/{mount_point}/{endpoint}/{roleset}'.format( mount_point=mount_point, endpoint=endpoint, roleset=roleset, )", "methods module.\"\"\" from hvac import exceptions from hvac.api.vault_api_base import VaultApiBase", "from hvac.api.vault_api_base import VaultApiBase from hvac.constants.gcp import DEFAULT_MOUNT_POINT, ALLOWED_CREDS_ENDPOINTS class", "'invalid endpoint argument provided \"{arg}\", supported types: \"{allowed_endpoints}\"' raise exceptions.ParamValidationError(error_msg.format(", "arg=endpoint, allowed_endpoints=', '.join(ALLOWED_CREDS_ENDPOINTS), )) api_path = '/v1/{mount_point}/{endpoint}/{roleset}'.format( mount_point=mount_point, endpoint=endpoint, roleset=roleset,", "api_path = '/v1/{mount_point}/{endpoint}/{roleset}'.format( mount_point=mount_point, endpoint=endpoint, roleset=roleset, ) response = self._adapter.get(", "not in ALLOWED_CREDS_ENDPOINTS: error_msg = 'invalid endpoint argument provided \"{arg}\",", "'.join(ALLOWED_CREDS_ENDPOINTS), )) api_path = '/v1/{mount_point}/{endpoint}/{roleset}'.format( mount_point=mount_point, endpoint=endpoint, roleset=roleset, ) response" ]
[ "= ypricemagic.magic.get_price(tokens[0], block) except ypricemagic.magic.PriceError: price0 is None if not", "you need, it's likely because that token requires a special", "try to get from elsewhere if not price0: try: price0", "[token_in, token_out] elif cake in (token_in, token_out): path = [token_in,", "as e: pass @memory.cache() def is_uniswap_pool(address): try: return Contract(address).factory() in", "= block) token1 = pair.token1(block_identifier = block) supply = pair.totalSupply(block_identifier", "import ypricemagic.utils.utils from .constants import STABLECOINS, dai, usdc, usdt, wbtc,", "chain.id == 56: #bsc from .constants import cake, wbnb if", "[token_in, weth, token_out] fees = 0.997 ** (len(path) - 1)", "= Contract(\"0xe9e7CEA3DedcA5984780Bafc599bD69ADd087D56\") token_out = busd tokens = [str(token) for token", "swap path. # Please add a viable swap path below", "\"pancakeswapv1\": { } } elif chain.id == 137: ROUTERS =", "except ypricemagic.magic.PriceError: price0 is None if not price1: try: price1", "else: factory = pair.factory(block_identifier = block) token0 = pair.token0(block_identifier =", "= [None,None] # [res / scale * price for res,", "in FACTORY_TO_ROUTER except (ValueError, OverflowError, AttributeError): pass return False @ttl_cache(ttl=600)", "def is_uniswap_pool(address): try: return Contract(address).factory() in FACTORY_TO_ROUTER except (ValueError, OverflowError,", "} SPECIAL_PATHS = { \"pancakeswapv2\": { }, \"pancakeswapv1\": { }", "[token_in,sushi,weth,token_out] elif str(token_in) in SPECIAL_PATHS[router].keys() and str(token_out) in STABLECOINS: path", "} SPECIAL_PATHS = { \"quickswap\": { } } FACTORY_TO_ROUTER =", "FACTORIES = { \"uniswap\": \"0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f\", \"sushiswap\": \"0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac\", } ROUTERS =", "except (ContractNotFound, ValueError) as e: pass @memory.cache() def is_uniswap_pool(address): try:", "[token_in, token_out] else: path = 
[token_in,wmatic,token_out] else: path = [token_in,", "balances[1] = reserves[1] / scales[1] * price1 balances = extrapolate_balance_if_needed()", "supply / 1e18 try: balances = [res / scale *", "from cachetools.func import ttl_cache from .utils.cache import memory from .utils.multicall2", "ROUTERS = { \"pancakeswapv2\": Contract(\"0x10ED43C718714eb63d5aA57B78B54704E256024E\"), \"pancakeswapv1\": Contract(\"0x05fF2B0DB69458A0750badebc4f9e13aDd608C7F\") } FACTORIES =", "extrapolate_balance_if_needed(): nonlocal balances if balances[0] and not balances[1]: balances[1] =", "(ValueError, OverflowError, AttributeError): pass return False @ttl_cache(ttl=600) def lp_price(address, block=None):", "(token_in, token_out): path = [token_in, token_out] else: path = [token_in,wmatic,token_out]", "because that token requires a special swap path. # Please", "[token_in, token_out] else: path = [token_in,wbnb,token_out] elif chain.id == 137:", "= [ypricemagic.utils.utils.Contract_with_erc20_fallback(token) for token in [token0, token1]] price0 = get_price(tokens[0],", "If this is failing to pull a price for a", "path = [token_in, weth, token_out] fees = 0.997 ** (len(path)", "available. 
\"\"\" if chain.id == 56 and token_out == usdc:", "in [token_in, token_out]] amount_in = 10 ** ypricemagic.utils.utils.get_decimals_with_override(tokens[0]) if str(token_in)", "@memory.cache() def is_uniswap_pool(address): try: return Contract(address).factory() in FACTORY_TO_ROUTER except (ValueError,", "= get_price(tokens[0], paired_against=tokens[1], router=router, block=block) price1 = get_price(tokens[1], paired_against=tokens[0], router=router,", "TypeError as e: # If can't get price via router,", "= ROUTERS[router] try: quote = router.getAmountsOut(amount_in, path, block_identifier=block) amount_out =", "= [token_in,wbnb,token_out] elif chain.id == 137: #bsc from .constants import", "except TypeError as e: # If can't get price via", "= exchange.getEthToTokenInputPrice(eth_bought, block_identifier=block) / 1e6 fees = 0.997 ** 2", "path available. \"\"\" if chain.id == 56 and token_out ==", "paired_against] elif weth in (token_in, token_out): path = [token_in, token_out]", "= [token_in, token_out] else: path = [token_in,wmatic,token_out] else: path =", "/ 10 ** ypricemagic.utils.utils.get_decimals_with_override(str(path[-1])) return amount_out / fees except ValueError", "fees except (ContractNotFound, ValueError) as e: pass @memory.cache() def is_uniswap_pool(address):", "supply = supply / 1e18 try: balances = [res /", "1) if router in ROUTERS: router = ROUTERS[router] try: quote", "for res, scale, price in zip(reserves, scales, prices)] if price0:", "brownie.exceptions import ContractNotFound from cachetools.func import ttl_cache from .utils.cache import", "{ \"quickswap\": Contract(\"0xa5E0829CaCEd8fFDD4De3c43696c57F7D7A678ff\") } FACTORIES = { \"quickswap\": \"0x5757371414417b8C6CAad45bAeF941aBc7d3Ab32\", }", "is None prices = [price0,price1] balances = [None,None] # [res", "as e: # If can't get price via router, try", "path = [token_in, paired_against] elif weth in (token_in, token_out): path", "[price0,price1] balances = [None,None] # [res / scale * 
price", "block) reserves = pair.getReserves(block_identifier = block) router = FACTORY_TO_PROTOCOL[factory] tokens", "reserves[1] / scales[1] * price1 balances = extrapolate_balance_if_needed() try: return", "fetch price data successfully. #project.load() if chain.id == 1: FACTORIES", "if wbnb in (token_in, token_out): path = [token_in, token_out] elif", "} elif chain.id == 56: ROUTERS = { \"pancakeswapv2\": Contract(\"0x10ED43C718714eb63d5aA57B78B54704E256024E\"),", "} SPECIAL_PATHS = { \"sushiswap\": { \"0xEF69B5697f2Fb0345cC680210fD39b593a2f9684\": [\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\": [\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\",\"0xC28E27870558cF22ADD83540d2126da2e4b464c2\",weth,usdc]", "1e6 fees = 0.997 ** 2 return usdc_bought / fees", "= pair.getReserves(block_identifier = block) router = FACTORY_TO_PROTOCOL[factory] tokens = [ypricemagic.utils.utils.Contract_with_erc20_fallback(token)", "def get_price(token_in, token_out=usdc, router=\"uniswap\", block=None, paired_against=weth): \"\"\" Calculate a price", "\"\"\" Calculate a price based on Uniswap Router quote for", "[\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\",usdc] ,\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\": [\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\",\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\": [\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\",\"0xba100000625a3754423978a60c9317c58a424e3D\",weth,usdc] ,\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\": [\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\",weth,usdc] ,\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\": [\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\",usdt] ,\"0x85034b3b2e292493D029443455Cc62ab669573B3\":", "SPECIAL_PATHS = { \"quickswap\": { } } FACTORY_TO_ROUTER = {FACTORIES[name]:", "brownie import Contract, chain from brownie.exceptions import ContractNotFound from cachetools.func", "= 
get_price(tokens[1], paired_against=tokens[0], router=router, block=block) prices = [price0,price1] scales =", "ContractNotFound from cachetools.func import ttl_cache from .utils.cache import memory from", "except ypricemagic.magic.PriceError: price1 is None prices = [price0,price1] balances =", "/ scales[1] * price1 balances = extrapolate_balance_if_needed() try: return sum(balances)", "[ypricemagic.utils.utils.Contract_with_erc20_fallback(token) for token in [token0, token1]] price0 = get_price(tokens[0], paired_against=tokens[1],", "FACTORIES} FACTORY_TO_PROTOCOL = {FACTORIES[name]: name for name in FACTORIES} @ttl_cache(ttl=36000)", "else: path = [token_in, weth, token_out] fees = 0.997 **", "wbnb if wbnb in (token_in, token_out): path = [token_in, token_out]", "= exchange.getTokenToEthInputPrice(10 ** ypricemagic.utils.utils.get_decimals_with_override(asset), block_identifier=block) exchange = Contract(factory.getExchange(usdc)) usdc_bought =", "FACTORY_TO_ROUTER = {FACTORIES[name]: ROUTERS[name] for name in FACTORIES} FACTORY_TO_PROTOCOL =", "if chain.id == 56 and token_out == usdc: busd =", "STABLECOINS and str(token_out) in STABLECOINS: path = [token_in, paired_against] elif", "pair = Contract(address) if chain.id not in [56, 137]: #", "pair.token1(block_identifier = block) supply = pair.totalSupply(block_identifier = block) reserves =", "} } FACTORY_TO_ROUTER = {FACTORIES[name]: ROUTERS[name] for name in FACTORIES}", "sushi and token_out != sushi: path = [token_in,sushi,weth,token_out] elif str(token_in)", "Contract(\"0x05fF2B0DB69458A0750badebc4f9e13aDd608C7F\") } FACTORIES = { \"pancakeswapv2\": \"0xcA143Ce32Fe78f1f7019d7d551a6402fC5350c73\", \"pancakeswapv1\": \"0xBCfCcbde45cE874adCB698cC183deBcF17952812\" }", "No multicall2 on bsc or poly factory, token0, token1, supply,", "tokens] supply = supply / 1e18 try: balances = [res", "cachetools.func import ttl_cache from .utils.cache import memory from .utils.multicall2 import", "from .constants import 
STABLECOINS, dai, usdc, usdt, wbtc, weth, sushi", "chain.id == 137: #bsc from .constants import wmatic if wmatic", "FACTORIES} @ttl_cache(ttl=36000) def get_price(token_in, token_out=usdc, router=\"uniswap\", block=None, paired_against=weth): \"\"\" Calculate", "via router, try to get from elsewhere if not price0:", "based on Uniswap Router quote for selling one `token_in`. Always", "path below to fetch price data successfully. #project.load() if chain.id", "balances[0] and not balances[1]: balances[1] = balances[0] if balances[1] and", "token_out]] amount_in = 10 ** ypricemagic.utils.utils.get_decimals_with_override(tokens[0]) if str(token_in) in STABLECOINS:", "\"token0\"], [pair, \"token1\"], [pair, \"totalSupply\"], [pair, \"getReserves\"], block=block ) else:", "wmatic in (token_in, token_out): path = [token_in, token_out] else: path", "pass return False @ttl_cache(ttl=600) def lp_price(address, block=None): \"\"\" Get Uniswap/Sushiswap", "block_identifier=block) / 1e6 fees = 0.997 ** 2 return usdc_bought", "token you need, it's likely because that token requires a", "path, block_identifier=block) amount_out = quote[-1] / 10 ** ypricemagic.utils.utils.get_decimals_with_override(str(path[-1])) return", "Uniswap/Sushiswap LP token price. 
\"\"\" def extrapolate_balance_if_needed(): nonlocal balances if", "token_out): path = [token_in, token_out] elif cake in (token_in, token_out):", "block_identifier=block) exchange = Contract(factory.getExchange(usdc)) usdc_bought = exchange.getEthToTokenInputPrice(eth_bought, block_identifier=block) / 1e6", "/ scale * price for res, scale, price in zip(reserves,", "pair.factory(block_identifier = block) token0 = pair.token0(block_identifier = block) token1 =", "= block) router = FACTORY_TO_PROTOCOL[factory] tokens = [ypricemagic.utils.utils.Contract_with_erc20_fallback(token) for token", "in FACTORIES} FACTORY_TO_PROTOCOL = {FACTORIES[name]: name for name in FACTORIES}", "return Contract(address).factory() in FACTORY_TO_ROUTER except (ValueError, OverflowError, AttributeError): pass return", "price0 is None if not price1: try: price1 = ypricemagic.magic.get_price(tokens[1],", ",\"0x383518188C0C6d7730D91b2c03a03C837814a899\": [\"0x383518188C0C6d7730D91b2c03a03C837814a899\",dai] ,\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\": [\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\",wbtc,weth,usdc] }, \"uniswap\": { } } elif", "pass @memory.cache() def is_uniswap_pool(address): try: return Contract(address).factory() in FACTORY_TO_ROUTER except", "factory = pair.factory(block_identifier = block) token0 = pair.token0(block_identifier = block)", "= Contract(\"0xc0a47dFe034B400B47bDaD5FecDa2621de6c4d95\") try: exchange = Contract(factory.getExchange(asset)) eth_bought = exchange.getTokenToEthInputPrice(10 **", "} } elif chain.id == 56: ROUTERS = { \"pancakeswapv2\":", "OverflowError, AttributeError): pass return False @ttl_cache(ttl=600) def lp_price(address, block=None): \"\"\"", "chain.id == 56 and token_out == usdc: busd = Contract(\"0xe9e7CEA3DedcA5984780Bafc599bD69ADd087D56\")", "wmatic if wmatic in (token_in, token_out): path = [token_in, token_out]", "== 137: ROUTERS = { \"quickswap\": Contract(\"0xa5E0829CaCEd8fFDD4De3c43696c57F7D7A678ff\") } FACTORIES =", "= 
balances[0] if balances[1] and not balances[0]: balances[0] = balances[1]", "\"pancakeswapv1\": Contract(\"0x05fF2B0DB69458A0750badebc4f9e13aDd608C7F\") } FACTORIES = { \"pancakeswapv2\": \"0xcA143Ce32Fe78f1f7019d7d551a6402fC5350c73\", \"pancakeswapv1\": \"0xBCfCcbde45cE874adCB698cC183deBcF17952812\"", "memory from .utils.multicall2 import fetch_multicall from .interfaces.ERC20 import ERC20ABI import", "import wmatic if wmatic in (token_in, token_out): path = [token_in,", "{ }, \"pancakeswapv1\": { } } elif chain.id == 137:", "Router quote for selling one `token_in`. Always uses intermediate WETH", "= [token_in,wmatic,token_out] else: path = [token_in, weth, token_out] fees =", "if not price1: try: price1 = ypricemagic.magic.get_price(tokens[1], block) except ypricemagic.magic.PriceError:", "amount_out / fees except ValueError as e: return @ttl_cache(ttl=600) def", "- 1) if router in ROUTERS: router = ROUTERS[router] try:", "e: return @ttl_cache(ttl=600) def get_price_v1(asset, block=None): factory = Contract(\"0xc0a47dFe034B400B47bDaD5FecDa2621de6c4d95\") try:", "price in zip(reserves, scales, prices)] if price0: balances[0] = reserves[0]", "= { \"uniswap\": Contract(\"0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D\"), \"sushiswap\": Contract(\"0xD9E1CE17F2641F24AE83637AB66A2CCA9C378B9F\"), } SPECIAL_PATHS = {", "scales[0] * price0 if price1: balances[1] = reserves[1] / scales[1]", "that token requires a special swap path. 
# Please add", "\"sushiswap\": \"0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac\", } ROUTERS = { \"uniswap\": Contract(\"0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D\"), \"sushiswap\": Contract(\"0xD9E1CE17F2641F24AE83637AB66A2CCA9C378B9F\"),", "elif weth in (token_in, token_out): path = [token_in, token_out] elif", "zip(reserves, scales, prices)] except TypeError as e: # If can't", "[\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\", usdc] ,\"0x383518188C0C6d7730D91b2c03a03C837814a899\": [\"0x383518188C0C6d7730D91b2c03a03C837814a899\",dai] ,\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\": [\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\",wbtc,weth,usdc] }, \"uniswap\": { }", "block) except ypricemagic.magic.PriceError: price0 is None if not price1: try:", "= { \"sushiswap\": { \"0xEF69B5697f2Fb0345cC680210fD39b593a2f9684\": [\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\": [\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\",\"0xC28E27870558cF22ADD83540d2126da2e4b464c2\",weth,usdc] ,\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\": [\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\",\"<KEY>\",usdc]", "SPECIAL_PATHS[router][str(token_in)] elif chain.id == 56: #bsc from .constants import cake,", "\"pancakeswapv2\": { }, \"pancakeswapv1\": { } } elif chain.id ==", "sushi # NOTE: If this is failing to pull a", "[\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\",usdt] ,\"0x85034b3b2e292493D029443455Cc62ab669573B3\": [\"0x85034b3b2e292493D029443455Cc62ab669573B3\",\"0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984\",weth,usdc] ,\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\": [\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\", usdc] ,\"0x383518188C0C6d7730D91b2c03a03C837814a899\": [\"0x383518188C0C6d7730D91b2c03a03C837814a899\",dai] ,\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\": [\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\",wbtc,weth,usdc]", "token_out] elif paired_against == sushi and token_out != sushi: path", 
",\"0x85034b3b2e292493D029443455Cc62ab669573B3\": [\"0x85034b3b2e292493D029443455Cc62ab669573B3\",\"0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984\",weth,usdc] ,\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\": [\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\", usdc] ,\"0x383518188C0C6d7730D91b2c03a03C837814a899\": [\"0x383518188C0C6d7730D91b2c03a03C837814a899\",dai] ,\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\": [\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\",wbtc,weth,usdc] },", "\"0xcA143Ce32Fe78f1f7019d7d551a6402fC5350c73\", \"pancakeswapv1\": \"0xBCfCcbde45cE874adCB698cC183deBcF17952812\" } SPECIAL_PATHS = { \"pancakeswapv2\": { },", "in (token_in, token_out): path = [token_in, token_out] elif paired_against ==", "wbtc, weth, sushi # NOTE: If this is failing to", "[str(token) for token in [token_in, token_out]] amount_in = 10 **", "ROUTERS = { \"quickswap\": Contract(\"0xa5E0829CaCEd8fFDD4De3c43696c57F7D7A678ff\") } FACTORIES = { \"quickswap\":", "path = [token_in, token_out] else: path = [token_in,wbnb,token_out] elif chain.id", "tokens = [str(token) for token in [token_in, token_out]] amount_in =", "block) token0 = pair.token0(block_identifier = block) token1 = pair.token1(block_identifier =", "* price1 balances = extrapolate_balance_if_needed() try: return sum(balances) / supply", "Calculate a price based on Uniswap Router quote for selling", "lp_price(address, block=None): \"\"\" Get Uniswap/Sushiswap LP token price. 
\"\"\" def", "Contract(\"0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D\"), \"sushiswap\": Contract(\"0xD9E1CE17F2641F24AE83637AB66A2CCA9C378B9F\"), } SPECIAL_PATHS = { \"sushiswap\": { \"0xEF69B5697f2Fb0345cC680210fD39b593a2f9684\":", "balances[1] = balances[0] if balances[1] and not balances[0]: balances[0] =", "from brownie.exceptions import ContractNotFound from cachetools.func import ttl_cache from .utils.cache", "e: pass @memory.cache() def is_uniswap_pool(address): try: return Contract(address).factory() in FACTORY_TO_ROUTER", "from .utils.cache import memory from .utils.multicall2 import fetch_multicall from .interfaces.ERC20", "in (token_in, token_out): path = [token_in, token_out] elif cake in", "except (ValueError, OverflowError, AttributeError): pass return False @ttl_cache(ttl=600) def lp_price(address,", "{ \"sushiswap\": { \"0xEF69B5697f2Fb0345cC680210fD39b593a2f9684\": [\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\": [\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\",\"0xC28E27870558cF22ADD83540d2126da2e4b464c2\",weth,usdc] ,\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\": [\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\",\"<KEY>\",usdc] ,\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\":", "busd tokens = [str(token) for token in [token_in, token_out]] amount_in", "if str(token_in) in STABLECOINS: return 1 elif str(paired_against) in STABLECOINS", "\"0xEF69B5697f2Fb0345cC680210fD39b593a2f9684\": [\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\": [\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\",\"0xC28E27870558cF22ADD83540d2126da2e4b464c2\",weth,usdc] ,\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\": [\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\",\"<KEY>\",usdc] ,\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\": [\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\",\"0x87F5F9eBE40786D49D35E1B5997b07cCAA8ADbFF\",weth,usdc] 
,\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\": [\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\",usdc]", "token_out] else: path = [token_in,wmatic,token_out] else: path = [token_in, weth,", "Contract(factory.getExchange(usdc)) usdc_bought = exchange.getEthToTokenInputPrice(eth_bought, block_identifier=block) / 1e6 fees = 0.997", "usdc_bought = exchange.getEthToTokenInputPrice(eth_bought, block_identifier=block) / 1e6 fees = 0.997 **", "\"0x5757371414417b8C6CAad45bAeF941aBc7d3Ab32\", } SPECIAL_PATHS = { \"quickswap\": { } } FACTORY_TO_ROUTER", "block=block ) else: factory = pair.factory(block_identifier = block) token0 =", "token in tokens] supply = supply / 1e18 try: balances", "= { \"quickswap\": Contract(\"0xa5E0829CaCEd8fFDD4De3c43696c57F7D7A678ff\") } FACTORIES = { \"quickswap\": \"0x5757371414417b8C6CAad45bAeF941aBc7d3Ab32\",", "= [token_in, token_out] elif paired_against == sushi and token_out !=", "factory, token0, token1, supply, reserves = fetch_multicall( [pair, \"factory\"], [pair,", "not price1: try: price1 = ypricemagic.magic.get_price(tokens[1], block) except ypricemagic.magic.PriceError: price1", "path. # Please add a viable swap path below to", "`token_in`. 
Always uses intermediate WETH pair if `[token_in,weth,token_out]` swap path", "token_out=usdc, router=\"uniswap\", block=None, paired_against=weth): \"\"\" Calculate a price based on", "for name in FACTORIES} @ttl_cache(ttl=36000) def get_price(token_in, token_out=usdc, router=\"uniswap\", block=None,", "ypricemagic.utils.utils.get_decimals_with_override(str(token)) for token in tokens] supply = supply / 1e18", "this is failing to pull a price for a token", "56: ROUTERS = { \"pancakeswapv2\": Contract(\"0x10ED43C718714eb63d5aA57B78B54704E256024E\"), \"pancakeswapv1\": Contract(\"0x05fF2B0DB69458A0750badebc4f9e13aDd608C7F\") } FACTORIES", "block) supply = pair.totalSupply(block_identifier = block) reserves = pair.getReserves(block_identifier =", "= { \"pancakeswapv2\": { }, \"pancakeswapv1\": { } } elif", "in (token_in, token_out): path = [token_in, token_out] else: path =", ".constants import wmatic if wmatic in (token_in, token_out): path =", "exchange.getEthToTokenInputPrice(eth_bought, block_identifier=block) / 1e6 fees = 0.997 ** 2 return", "bsc or poly factory, token0, token1, supply, reserves = fetch_multicall(", "#bsc from .constants import cake, wbnb if wbnb in (token_in,", "price0 = ypricemagic.magic.get_price(tokens[0], block) except ypricemagic.magic.PriceError: price0 is None if", "= extrapolate_balance_if_needed() try: return sum(balances) / supply except TypeError: return", "for a token you need, it's likely because that token", "scale, price in zip(reserves, scales, prices)] except TypeError as e:", "if price1: balances[1] = reserves[1] / scales[1] * price1 balances", "= balances[1] return balances pair = Contract(address) if chain.id not", "prices)] if price0: balances[0] = reserves[0] / scales[0] * price0", "[pair, \"getReserves\"], block=block ) else: factory = pair.factory(block_identifier = block)", "price1 is None prices = [price0,price1] balances = [None,None] #", "price for res, scale, price in zip(reserves, scales, prices)] if", "/ fees 
except (ContractNotFound, ValueError) as e: pass @memory.cache() def", "= quote[-1] / 10 ** ypricemagic.utils.utils.get_decimals_with_override(str(path[-1])) return amount_out / fees", "[None,None] # [res / scale * price for res, scale,", "block) router = FACTORY_TO_PROTOCOL[factory] tokens = [ypricemagic.utils.utils.Contract_with_erc20_fallback(token) for token in", "in tokens] supply = supply / 1e18 try: balances =", "FACTORIES = { \"quickswap\": \"0x5757371414417b8C6CAad45bAeF941aBc7d3Ab32\", } SPECIAL_PATHS = { \"quickswap\":", "[token_in, token_out]] amount_in = 10 ** ypricemagic.utils.utils.get_decimals_with_override(tokens[0]) if str(token_in) in", "scales = [10 ** ypricemagic.utils.utils.get_decimals_with_override(str(token)) for token in tokens] supply", "eth_bought = exchange.getTokenToEthInputPrice(10 ** ypricemagic.utils.utils.get_decimals_with_override(asset), block_identifier=block) exchange = Contract(factory.getExchange(usdc)) usdc_bought", "= 0.997 ** 2 return usdc_bought / fees except (ContractNotFound,", "\"pancakeswapv2\": \"0xcA143Ce32Fe78f1f7019d7d551a6402fC5350c73\", \"pancakeswapv1\": \"0xBCfCcbde45cE874adCB698cC183deBcF17952812\" } SPECIAL_PATHS = { \"pancakeswapv2\": {", "= [10 ** ypricemagic.utils.utils.get_decimals_with_override(str(token)) for token in tokens] supply =", "usdc, usdt, wbtc, weth, sushi # NOTE: If this is", "if balances[0] and not balances[1]: balances[1] = balances[0] if balances[1]", "= { \"uniswap\": \"0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f\", \"sushiswap\": \"0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac\", } ROUTERS = {", "token_out] else: path = [token_in,wbnb,token_out] elif chain.id == 137: #bsc", "ypricemagic.utils.utils.get_decimals_with_override(tokens[0]) if str(token_in) in STABLECOINS: return 1 elif str(paired_against) in", "token in [token_in, token_out]] amount_in = 10 ** ypricemagic.utils.utils.get_decimals_with_override(tokens[0]) if", "failing to pull a price for a token you need,", "\"uniswap\": 
\"0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f\", \"sushiswap\": \"0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac\", } ROUTERS = { \"uniswap\": Contract(\"0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D\"),", "exchange.getTokenToEthInputPrice(10 ** ypricemagic.utils.utils.get_decimals_with_override(asset), block_identifier=block) exchange = Contract(factory.getExchange(usdc)) usdc_bought = exchange.getEthToTokenInputPrice(eth_bought,", "= [str(token) for token in [token_in, token_out]] amount_in = 10", "if wmatic in (token_in, token_out): path = [token_in, token_out] else:", "get_price(token_in, token_out=usdc, router=\"uniswap\", block=None, paired_against=weth): \"\"\" Calculate a price based", "= pair.factory(block_identifier = block) token0 = pair.token0(block_identifier = block) token1", "137: #bsc from .constants import wmatic if wmatic in (token_in,", "{ \"quickswap\": { } } FACTORY_TO_ROUTER = {FACTORIES[name]: ROUTERS[name] for", ",\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\": [\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\", usdc] ,\"0x383518188C0C6d7730D91b2c03a03C837814a899\": [\"0x383518188C0C6d7730D91b2c03a03C837814a899\",dai] ,\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\": [\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\",wbtc,weth,usdc] }, \"uniswap\": {", "token price. 
\"\"\" def extrapolate_balance_if_needed(): nonlocal balances if balances[0] and", "= Contract(address) if chain.id not in [56, 137]: # No", "Contract(factory.getExchange(asset)) eth_bought = exchange.getTokenToEthInputPrice(10 ** ypricemagic.utils.utils.get_decimals_with_override(asset), block_identifier=block) exchange = Contract(factory.getExchange(usdc))", "(token_in, token_out): path = [token_in, token_out] elif paired_against == sushi", "from brownie import Contract, chain from brownie.exceptions import ContractNotFound from", "[token_in,wmatic,token_out] else: path = [token_in, weth, token_out] fees = 0.997", "= busd tokens = [str(token) for token in [token_in, token_out]]", "== 56: #bsc from .constants import cake, wbnb if wbnb", "Contract(\"0xe9e7CEA3DedcA5984780Bafc599bD69ADd087D56\") token_out = busd tokens = [str(token) for token in", "Uniswap Router quote for selling one `token_in`. Always uses intermediate", "== usdc: busd = Contract(\"0xe9e7CEA3DedcA5984780Bafc599bD69ADd087D56\") token_out = busd tokens =", "from elsewhere if not price0: try: price0 = ypricemagic.magic.get_price(tokens[0], block)", "router = ROUTERS[router] try: quote = router.getAmountsOut(amount_in, path, block_identifier=block) amount_out", "} FACTORY_TO_ROUTER = {FACTORIES[name]: ROUTERS[name] for name in FACTORIES} FACTORY_TO_PROTOCOL", "\"sushiswap\": Contract(\"0xD9E1CE17F2641F24AE83637AB66A2CCA9C378B9F\"), } SPECIAL_PATHS = { \"sushiswap\": { \"0xEF69B5697f2Fb0345cC680210fD39b593a2f9684\": [\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc]", "ROUTERS: router = ROUTERS[router] try: quote = router.getAmountsOut(amount_in, path, block_identifier=block)", ".utils.multicall2 import fetch_multicall from .interfaces.ERC20 import ERC20ABI import ypricemagic.magic import", "price1 balances = extrapolate_balance_if_needed() try: return sum(balances) / supply except", "res, scale, price in zip(reserves, scales, prices)] if price0: balances[0]", "\"factory\"], [pair, 
\"token0\"], [pair, \"token1\"], [pair, \"totalSupply\"], [pair, \"getReserves\"], block=block", "if price0: balances[0] = reserves[0] / scales[0] * price0 if", "intermediate WETH pair if `[token_in,weth,token_out]` swap path available. \"\"\" if", "path = [token_in, token_out] elif paired_against == sushi and token_out", "price1: balances[1] = reserves[1] / scales[1] * price1 balances =", "token_out): path = [token_in, token_out] else: path = [token_in,wbnb,token_out] elif", "[token_in, token_out] elif paired_against == sushi and token_out != sushi:", "** ypricemagic.utils.utils.get_decimals_with_override(str(path[-1])) return amount_out / fees except ValueError as e:", "token_out): path = [token_in, token_out] elif paired_against == sushi and", "token1 = pair.token1(block_identifier = block) supply = pair.totalSupply(block_identifier = block)", "token_out != sushi: path = [token_in,sushi,weth,token_out] elif str(token_in) in SPECIAL_PATHS[router].keys()", "amount_out = quote[-1] / 10 ** ypricemagic.utils.utils.get_decimals_with_override(str(path[-1])) return amount_out /", "None prices = [price0,price1] balances = [None,None] # [res /", "balances[0] = reserves[0] / scales[0] * price0 if price1: balances[1]", "router in ROUTERS: router = ROUTERS[router] try: quote = router.getAmountsOut(amount_in,", "reserves = fetch_multicall( [pair, \"factory\"], [pair, \"token0\"], [pair, \"token1\"], [pair,", "block) token1 = pair.token1(block_identifier = block) supply = pair.totalSupply(block_identifier =", "from tokenize import tokenize from brownie import Contract, chain from", "need, it's likely because that token requires a special swap", "token1]] price0 = get_price(tokens[0], paired_against=tokens[1], router=router, block=block) price1 = get_price(tokens[1],", "scales, prices)] if price0: balances[0] = reserves[0] / scales[0] *", "#bsc from .constants import wmatic if wmatic in (token_in, token_out):", "STABLECOINS: path = [token_in, paired_against] elif weth in 
(token_in, token_out):", "pull a price for a token you need, it's likely", "SPECIAL_PATHS = { \"sushiswap\": { \"0xEF69B5697f2Fb0345cC680210fD39b593a2f9684\": [\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\": [\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\",\"0xC28E27870558cF22ADD83540d2126da2e4b464c2\",weth,usdc] ,\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\":", "and not balances[1]: balances[1] = balances[0] if balances[1] and not", "fetch_multicall( [pair, \"factory\"], [pair, \"token0\"], [pair, \"token1\"], [pair, \"totalSupply\"], [pair,", "@ttl_cache(ttl=600) def get_price_v1(asset, block=None): factory = Contract(\"0xc0a47dFe034B400B47bDaD5FecDa2621de6c4d95\") try: exchange =", "[\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\",\"0xC28E27870558cF22ADD83540d2126da2e4b464c2\",weth,usdc] ,\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\": [\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\",\"<KEY>\",usdc] ,\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\": [\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\",\"0x87F5F9eBE40786D49D35E1B5997b07cCAA8ADbFF\",weth,usdc] ,\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\": [\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\",usdc] ,\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\": [\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\",\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\":", "\"uniswap\": Contract(\"0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D\"), \"sushiswap\": Contract(\"0xD9E1CE17F2641F24AE83637AB66A2CCA9C378B9F\"), } SPECIAL_PATHS = { \"sushiswap\": {", "False @ttl_cache(ttl=600) def lp_price(address, block=None): \"\"\" Get Uniswap/Sushiswap LP token", "for res, scale, price in zip(reserves, scales, prices)] except TypeError", "[\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\",wbtc,weth,usdc] }, \"uniswap\": { } } elif chain.id == 56:", "usdc_bought / fees except (ContractNotFound, ValueError) as e: pass 
@memory.cache()", "ypricemagic.magic import ypricemagic.utils.utils from .constants import STABLECOINS, dai, usdc, usdt,", "str(paired_against) in STABLECOINS and str(token_out) in STABLECOINS: path = [token_in,", "path = SPECIAL_PATHS[router][str(token_in)] elif chain.id == 56: #bsc from .constants", "if chain.id == 1: FACTORIES = { \"uniswap\": \"0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f\", \"sushiswap\":", "{ \"quickswap\": \"0x5757371414417b8C6CAad45bAeF941aBc7d3Ab32\", } SPECIAL_PATHS = { \"quickswap\": { }", "= 0.997 ** (len(path) - 1) if router in ROUTERS:", "[pair, \"totalSupply\"], [pair, \"getReserves\"], block=block ) else: factory = pair.factory(block_identifier", "chain.id == 56: ROUTERS = { \"pancakeswapv2\": Contract(\"0x10ED43C718714eb63d5aA57B78B54704E256024E\"), \"pancakeswapv1\": Contract(\"0x05fF2B0DB69458A0750badebc4f9e13aDd608C7F\")", "in STABLECOINS: return 1 elif str(paired_against) in STABLECOINS and str(token_out)", "* price for res, scale, price in zip(reserves, scales, prices)]", "likely because that token requires a special swap path. 
#", "scales, prices)] except TypeError as e: # If can't get", "weth, token_out] fees = 0.997 ** (len(path) - 1) if", "ypricemagic.magic.get_price(tokens[0], block) except ypricemagic.magic.PriceError: price0 is None if not price1:", "balances = extrapolate_balance_if_needed() try: return sum(balances) / supply except TypeError:", "# [res / scale * price for res, scale, price", "elif paired_against == sushi and token_out != sushi: path =", "(token_in, token_out): path = [token_in, token_out] else: path = [token_in,wbnb,token_out]", "if balances[1] and not balances[0]: balances[0] = balances[1] return balances", "try: quote = router.getAmountsOut(amount_in, path, block_identifier=block) amount_out = quote[-1] /", "Contract, chain from brownie.exceptions import ContractNotFound from cachetools.func import ttl_cache", "Contract(\"0x10ED43C718714eb63d5aA57B78B54704E256024E\"), \"pancakeswapv1\": Contract(\"0x05fF2B0DB69458A0750badebc4f9e13aDd608C7F\") } FACTORIES = { \"pancakeswapv2\": \"0xcA143Ce32Fe78f1f7019d7d551a6402fC5350c73\", \"pancakeswapv1\":", "fetch_multicall from .interfaces.ERC20 import ERC20ABI import ypricemagic.magic import ypricemagic.utils.utils from", "56: #bsc from .constants import cake, wbnb if wbnb in", "path = [token_in,wmatic,token_out] else: path = [token_in, weth, token_out] fees", "0.997 ** (len(path) - 1) if router in ROUTERS: router", "[\"0x85034b3b2e292493D029443455Cc62ab669573B3\",\"0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984\",weth,usdc] ,\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\": [\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\", usdc] ,\"0x383518188C0C6d7730D91b2c03a03C837814a899\": [\"0x383518188C0C6d7730D91b2c03a03C837814a899\",dai] ,\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\": [\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\",wbtc,weth,usdc] }, \"uniswap\":", "try: price0 = ypricemagic.magic.get_price(tokens[0], block) except ypricemagic.magic.PriceError: price0 is None", "from .utils.multicall2 import fetch_multicall from 
.interfaces.ERC20 import ERC20ABI import ypricemagic.magic", "for name in FACTORIES} FACTORY_TO_PROTOCOL = {FACTORIES[name]: name for name", "** ypricemagic.utils.utils.get_decimals_with_override(asset), block_identifier=block) exchange = Contract(factory.getExchange(usdc)) usdc_bought = exchange.getEthToTokenInputPrice(eth_bought, block_identifier=block)", "price for res, scale, price in zip(reserves, scales, prices)] except", "\"\"\" def extrapolate_balance_if_needed(): nonlocal balances if balances[0] and not balances[1]:", "or poly factory, token0, token1, supply, reserves = fetch_multicall( [pair,", "get from elsewhere if not price0: try: price0 = ypricemagic.magic.get_price(tokens[0],", "token_out): path = [token_in, token_out] else: path = [token_in,wmatic,token_out] else:", ".utils.cache import memory from .utils.multicall2 import fetch_multicall from .interfaces.ERC20 import", "FACTORY_TO_ROUTER except (ValueError, OverflowError, AttributeError): pass return False @ttl_cache(ttl=600) def", "data successfully. #project.load() if chain.id == 1: FACTORIES = {", "swap path available. \"\"\" if chain.id == 56 and token_out", "if router in ROUTERS: router = ROUTERS[router] try: quote =", "is None if not price1: try: price1 = ypricemagic.magic.get_price(tokens[1], block)", "== 1: FACTORIES = { \"uniswap\": \"0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f\", \"sushiswap\": \"0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac\", }", "name for name in FACTORIES} @ttl_cache(ttl=36000) def get_price(token_in, token_out=usdc, router=\"uniswap\",", "token requires a special swap path. 
# Please add a", "[token_in,wbnb,token_out] elif chain.id == 137: #bsc from .constants import wmatic", "prices)] except TypeError as e: # If can't get price", "} elif chain.id == 137: ROUTERS = { \"quickswap\": Contract(\"0xa5E0829CaCEd8fFDD4De3c43696c57F7D7A678ff\")", "supply, reserves = fetch_multicall( [pair, \"factory\"], [pair, \"token0\"], [pair, \"token1\"],", "import cake, wbnb if wbnb in (token_in, token_out): path =", "[\"0x383518188C0C6d7730D91b2c03a03C837814a899\",dai] ,\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\": [\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\",wbtc,weth,usdc] }, \"uniswap\": { } } elif chain.id", "elif chain.id == 56: #bsc from .constants import cake, wbnb", "not price0: try: price0 = ypricemagic.magic.get_price(tokens[0], block) except ypricemagic.magic.PriceError: price0", "fees except ValueError as e: return @ttl_cache(ttl=600) def get_price_v1(asset, block=None):", "router=router, block=block) prices = [price0,price1] scales = [10 ** ypricemagic.utils.utils.get_decimals_with_override(str(token))", "1: FACTORIES = { \"uniswap\": \"0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f\", \"sushiswap\": \"0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac\", } ROUTERS", "[\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\",\"0x87F5F9eBE40786D49D35E1B5997b07cCAA8ADbFF\",weth,usdc] ,\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\": [\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\",usdc] ,\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\": [\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\",\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\": [\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\",\"0xba100000625a3754423978a60c9317c58a424e3D\",weth,usdc] ,\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\": [\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\",weth,usdc] ,\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\":", "# No multicall2 on bsc or poly factory, token0, token1,", "* price0 if price1: balances[1] = reserves[1] 
/ scales[1] *", "\"0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f\", \"sushiswap\": \"0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac\", } ROUTERS = { \"uniswap\": Contract(\"0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D\"), \"sushiswap\":", "= [token_in, token_out] elif cake in (token_in, token_out): path =", "{ \"0xEF69B5697f2Fb0345cC680210fD39b593a2f9684\": [\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\": [\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\",\"0xC28E27870558cF22ADD83540d2126da2e4b464c2\",weth,usdc] ,\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\": [\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\",\"<KEY>\",usdc] ,\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\": [\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\",\"0x87F5F9eBE40786D49D35E1B5997b07cCAA8ADbFF\",weth,usdc] ,\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\":", "FACTORY_TO_PROTOCOL[factory] tokens = [ypricemagic.utils.utils.Contract_with_erc20_fallback(token) for token in [token0, token1]] price0", "ValueError as e: return @ttl_cache(ttl=600) def get_price_v1(asset, block=None): factory =", "= ypricemagic.magic.get_price(tokens[1], block) except ypricemagic.magic.PriceError: price1 is None prices =", ".constants import STABLECOINS, dai, usdc, usdt, wbtc, weth, sushi #", "Please add a viable swap path below to fetch price", "AttributeError): pass return False @ttl_cache(ttl=600) def lp_price(address, block=None): \"\"\" Get", ",\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\": [\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\",\"0xba100000625a3754423978a60c9317c58a424e3D\",weth,usdc] ,\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\": [\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\",weth,usdc] ,\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\": [\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\",usdt] ,\"0x85034b3b2e292493D029443455Cc62ab669573B3\": [\"0x85034b3b2e292493D029443455Cc62ab669573B3\",\"0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984\",weth,usdc] 
,\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\": [\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\",", "get_price(tokens[0], paired_against=tokens[1], router=router, block=block) price1 = get_price(tokens[1], paired_against=tokens[0], router=router, block=block)", "Contract(\"0xa5E0829CaCEd8fFDD4De3c43696c57F7D7A678ff\") } FACTORIES = { \"quickswap\": \"0x5757371414417b8C6CAad45bAeF941aBc7d3Ab32\", } SPECIAL_PATHS =", "\"0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac\", } ROUTERS = { \"uniswap\": Contract(\"0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D\"), \"sushiswap\": Contract(\"0xD9E1CE17F2641F24AE83637AB66A2CCA9C378B9F\"), }", "block=block) prices = [price0,price1] scales = [10 ** ypricemagic.utils.utils.get_decimals_with_override(str(token)) for", "import ttl_cache from .utils.cache import memory from .utils.multicall2 import fetch_multicall", "name in FACTORIES} FACTORY_TO_PROTOCOL = {FACTORIES[name]: name for name in", "elsewhere if not price0: try: price0 = ypricemagic.magic.get_price(tokens[0], block) except", "price for a token you need, it's likely because that", "else: path = [token_in,wmatic,token_out] else: path = [token_in, weth, token_out]", "= { \"quickswap\": { } } FACTORY_TO_ROUTER = {FACTORIES[name]: ROUTERS[name]", "else: path = [token_in,wbnb,token_out] elif chain.id == 137: #bsc from", "elif chain.id == 137: #bsc from .constants import wmatic if", "paired_against=tokens[0], router=router, block=block) prices = [price0,price1] scales = [10 **", "= fetch_multicall( [pair, \"factory\"], [pair, \"token0\"], [pair, \"token1\"], [pair, \"totalSupply\"],", "if not price0: try: price0 = ypricemagic.magic.get_price(tokens[0], block) except ypricemagic.magic.PriceError:", "ERC20ABI import ypricemagic.magic import ypricemagic.utils.utils from .constants import STABLECOINS, dai,", "below to fetch price data successfully. 
#project.load() if chain.id ==", ",\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\": [\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\",\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\": [\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\",\"0xba100000625a3754423978a60c9317c58a424e3D\",weth,usdc] ,\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\": [\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\",weth,usdc] ,\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\": [\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\",usdt] ,\"0x85034b3b2e292493D029443455Cc62ab669573B3\": [\"0x85034b3b2e292493D029443455Cc62ab669573B3\",\"0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984\",weth,usdc]", "token from tokenize import tokenize from brownie import Contract, chain", "paired_against == sushi and token_out != sushi: path = [token_in,sushi,weth,token_out]", "SPECIAL_PATHS[router].keys() and str(token_out) in STABLECOINS: path = SPECIAL_PATHS[router][str(token_in)] elif chain.id", "FACTORIES = { \"pancakeswapv2\": \"0xcA143Ce32Fe78f1f7019d7d551a6402fC5350c73\", \"pancakeswapv1\": \"0xBCfCcbde45cE874adCB698cC183deBcF17952812\" } SPECIAL_PATHS =", "sushi: path = [token_in,sushi,weth,token_out] elif str(token_in) in SPECIAL_PATHS[router].keys() and str(token_out)", "for token in tokens] supply = supply / 1e18 try:", "price via router, try to get from elsewhere if not", "in [token0, token1]] price0 = get_price(tokens[0], paired_against=tokens[1], router=router, block=block) price1", "balances if balances[0] and not balances[1]: balances[1] = balances[0] if", "= pair.token1(block_identifier = block) supply = pair.totalSupply(block_identifier = block) reserves", "quote for selling one `token_in`. Always uses intermediate WETH pair", "and token_out != sushi: path = [token_in,sushi,weth,token_out] elif str(token_in) in", "def lp_price(address, block=None): \"\"\" Get Uniswap/Sushiswap LP token price. 
\"\"\"", "[res / scale * price for res, scale, price in", ",\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\": [\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\",usdt] ,\"0x85034b3b2e292493D029443455Cc62ab669573B3\": [\"0x85034b3b2e292493D029443455Cc62ab669573B3\",\"0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984\",weth,usdc] ,\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\": [\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\", usdc] ,\"0x383518188C0C6d7730D91b2c03a03C837814a899\": [\"0x383518188C0C6d7730D91b2c03a03C837814a899\",dai] ,\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\":", "return balances pair = Contract(address) if chain.id not in [56,", "price0: try: price0 = ypricemagic.magic.get_price(tokens[0], block) except ypricemagic.magic.PriceError: price0 is", "ROUTERS[router] try: quote = router.getAmountsOut(amount_in, path, block_identifier=block) amount_out = quote[-1]", "@ttl_cache(ttl=36000) def get_price(token_in, token_out=usdc, router=\"uniswap\", block=None, paired_against=weth): \"\"\" Calculate a", "price data successfully. 
#project.load() if chain.id == 1: FACTORIES =", "ROUTERS[name] for name in FACTORIES} FACTORY_TO_PROTOCOL = {FACTORIES[name]: name for", "price0: balances[0] = reserves[0] / scales[0] * price0 if price1:", "price in zip(reserves, scales, prices)] except TypeError as e: #", "balances[1] and not balances[0]: balances[0] = balances[1] return balances pair", "path = [token_in, token_out] elif cake in (token_in, token_out): path", "return @ttl_cache(ttl=600) def get_price_v1(asset, block=None): factory = Contract(\"0xc0a47dFe034B400B47bDaD5FecDa2621de6c4d95\") try: exchange", "{FACTORIES[name]: name for name in FACTORIES} @ttl_cache(ttl=36000) def get_price(token_in, token_out=usdc,", "= [price0,price1] scales = [10 ** ypricemagic.utils.utils.get_decimals_with_override(str(token)) for token in", "in FACTORIES} @ttl_cache(ttl=36000) def get_price(token_in, token_out=usdc, router=\"uniswap\", block=None, paired_against=weth): \"\"\"", "tokenize import tokenize from brownie import Contract, chain from brownie.exceptions", "block_identifier=block) amount_out = quote[-1] / 10 ** ypricemagic.utils.utils.get_decimals_with_override(str(path[-1])) return amount_out", "= [res / scale * price for res, scale, price", "\"\"\" if chain.id == 56 and token_out == usdc: busd", "[pair, \"token1\"], [pair, \"totalSupply\"], [pair, \"getReserves\"], block=block ) else: factory", "pair.getReserves(block_identifier = block) router = FACTORY_TO_PROTOCOL[factory] tokens = [ypricemagic.utils.utils.Contract_with_erc20_fallback(token) for", "\"quickswap\": { } } FACTORY_TO_ROUTER = {FACTORIES[name]: ROUTERS[name] for name", "elif str(token_in) in SPECIAL_PATHS[router].keys() and str(token_out) in STABLECOINS: path =", "dai, usdc, usdt, wbtc, weth, sushi # NOTE: If this", "{ \"pancakeswapv2\": { }, \"pancakeswapv1\": { } } elif chain.id", "Contract(\"0xc0a47dFe034B400B47bDaD5FecDa2621de6c4d95\") try: exchange = Contract(factory.getExchange(asset)) eth_bought = exchange.getTokenToEthInputPrice(10 
** ypricemagic.utils.utils.get_decimals_with_override(asset),", "exchange = Contract(factory.getExchange(asset)) eth_bought = exchange.getTokenToEthInputPrice(10 ** ypricemagic.utils.utils.get_decimals_with_override(asset), block_identifier=block) exchange", "price0 = get_price(tokens[0], paired_against=tokens[1], router=router, block=block) price1 = get_price(tokens[1], paired_against=tokens[0],", "router, try to get from elsewhere if not price0: try:", "ypricemagic.magic.get_price(tokens[1], block) except ypricemagic.magic.PriceError: price1 is None prices = [price0,price1]", "tokens = [ypricemagic.utils.utils.Contract_with_erc20_fallback(token) for token in [token0, token1]] price0 =", "}, \"pancakeswapv1\": { } } elif chain.id == 137: ROUTERS", "elif chain.id == 137: ROUTERS = { \"quickswap\": Contract(\"0xa5E0829CaCEd8fFDD4De3c43696c57F7D7A678ff\") }", "fees = 0.997 ** (len(path) - 1) if router in", "1 elif str(paired_against) in STABLECOINS and str(token_out) in STABLECOINS: path", "if `[token_in,weth,token_out]` swap path available. \"\"\" if chain.id == 56", "!= sushi: path = [token_in,sushi,weth,token_out] elif str(token_in) in SPECIAL_PATHS[router].keys() and", "chain from brownie.exceptions import ContractNotFound from cachetools.func import ttl_cache from", "\"pancakeswapv2\": Contract(\"0x10ED43C718714eb63d5aA57B78B54704E256024E\"), \"pancakeswapv1\": Contract(\"0x05fF2B0DB69458A0750badebc4f9e13aDd608C7F\") } FACTORIES = { \"pancakeswapv2\": \"0xcA143Ce32Fe78f1f7019d7d551a6402fC5350c73\",", "import fetch_multicall from .interfaces.ERC20 import ERC20ABI import ypricemagic.magic import ypricemagic.utils.utils", "in STABLECOINS: path = [token_in, paired_against] elif weth in (token_in,", "and not balances[0]: balances[0] = balances[1] return balances pair =", "{ } } elif chain.id == 56: ROUTERS = {", "for selling one `token_in`. 
Always uses intermediate WETH pair if", "ypricemagic.magic.PriceError: price0 is None if not price1: try: price1 =", "\"sushiswap\": { \"0xEF69B5697f2Fb0345cC680210fD39b593a2f9684\": [\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\": [\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\",\"0xC28E27870558cF22ADD83540d2126da2e4b464c2\",weth,usdc] ,\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\": [\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\",\"<KEY>\",usdc] ,\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\": [\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\",\"0x87F5F9eBE40786D49D35E1B5997b07cCAA8ADbFF\",weth,usdc]", "path = [token_in,wbnb,token_out] elif chain.id == 137: #bsc from .constants", "str(token_out) in STABLECOINS: path = [token_in, paired_against] elif weth in", "def get_price_v1(asset, block=None): factory = Contract(\"0xc0a47dFe034B400B47bDaD5FecDa2621de6c4d95\") try: exchange = Contract(factory.getExchange(asset))", "2 return usdc_bought / fees except (ContractNotFound, ValueError) as e:", "/ 1e18 try: balances = [res / scale * price", "= [token_in, weth, token_out] fees = 0.997 ** (len(path) -", "ypricemagic.utils.utils.get_decimals_with_override(str(path[-1])) return amount_out / fees except ValueError as e: return", "(ContractNotFound, ValueError) as e: pass @memory.cache() def is_uniswap_pool(address): try: return", ",\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\": [\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\",\"0x87F5F9eBE40786D49D35E1B5997b07cCAA8ADbFF\",weth,usdc] ,\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\": [\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\",usdc] ,\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\": [\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\",\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\": [\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\",\"0xba100000625a3754423978a60c9317c58a424e3D\",weth,usdc] 
,\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\": [\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\",weth,usdc]", "/ fees except ValueError as e: return @ttl_cache(ttl=600) def get_price_v1(asset,", "/ 1e6 fees = 0.997 ** 2 return usdc_bought /", "pair.totalSupply(block_identifier = block) reserves = pair.getReserves(block_identifier = block) router =", ",\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\": [\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\",\"0xC28E27870558cF22ADD83540d2126da2e4b464c2\",weth,usdc] ,\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\": [\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\",\"<KEY>\",usdc] ,\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\": [\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\",\"0x87F5F9eBE40786D49D35E1B5997b07cCAA8ADbFF\",weth,usdc] ,\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\": [\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\",usdc] ,\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\": [\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\",\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc]", "[10 ** ypricemagic.utils.utils.get_decimals_with_override(str(token)) for token in tokens] supply = supply", "import token from tokenize import tokenize from brownie import Contract,", "ypricemagic.utils.utils from .constants import STABLECOINS, dai, usdc, usdt, wbtc, weth,", "LP token price. 
\"\"\" def extrapolate_balance_if_needed(): nonlocal balances if balances[0]", "STABLECOINS: path = SPECIAL_PATHS[router][str(token_in)] elif chain.id == 56: #bsc from", "from .interfaces.ERC20 import ERC20ABI import ypricemagic.magic import ypricemagic.utils.utils from .constants", "and str(token_out) in STABLECOINS: path = SPECIAL_PATHS[router][str(token_in)] elif chain.id ==", "\"getReserves\"], block=block ) else: factory = pair.factory(block_identifier = block) token0", "balances = [res / scale * price for res, scale,", "try: exchange = Contract(factory.getExchange(asset)) eth_bought = exchange.getTokenToEthInputPrice(10 ** ypricemagic.utils.utils.get_decimals_with_override(asset), block_identifier=block)", "to get from elsewhere if not price0: try: price0 =", "quote[-1] / 10 ** ypricemagic.utils.utils.get_decimals_with_override(str(path[-1])) return amount_out / fees except", "If can't get price via router, try to get from", "in [56, 137]: # No multicall2 on bsc or poly", "price0 if price1: balances[1] = reserves[1] / scales[1] * price1", "from .constants import cake, wbnb if wbnb in (token_in, token_out):", "return amount_out / fees except ValueError as e: return @ttl_cache(ttl=600)", "def extrapolate_balance_if_needed(): nonlocal balances if balances[0] and not balances[1]: balances[1]", "\"quickswap\": Contract(\"0xa5E0829CaCEd8fFDD4De3c43696c57F7D7A678ff\") } FACTORIES = { \"quickswap\": \"0x5757371414417b8C6CAad45bAeF941aBc7d3Ab32\", } SPECIAL_PATHS", "block=None): \"\"\" Get Uniswap/Sushiswap LP token price. 
\"\"\" def extrapolate_balance_if_needed():", "token1, supply, reserves = fetch_multicall( [pair, \"factory\"], [pair, \"token0\"], [pair,", "token in [token0, token1]] price0 = get_price(tokens[0], paired_against=tokens[1], router=router, block=block)", "= pair.totalSupply(block_identifier = block) reserves = pair.getReserves(block_identifier = block) router", "ypricemagic.magic.PriceError: price1 is None prices = [price0,price1] balances = [None,None]", "router=\"uniswap\", block=None, paired_against=weth): \"\"\" Calculate a price based on Uniswap", "token_out] elif cake in (token_in, token_out): path = [token_in, token_out]", "busd = Contract(\"0xe9e7CEA3DedcA5984780Bafc599bD69ADd087D56\") token_out = busd tokens = [str(token) for", "e: # If can't get price via router, try to", "str(token_out) in STABLECOINS: path = SPECIAL_PATHS[router][str(token_in)] elif chain.id == 56:", "[token0, token1]] price0 = get_price(tokens[0], paired_against=tokens[1], router=router, block=block) price1 =", ") else: factory = pair.factory(block_identifier = block) token0 = pair.token0(block_identifier", "1e18 try: balances = [res / scale * price for", "usdc: busd = Contract(\"0xe9e7CEA3DedcA5984780Bafc599bD69ADd087D56\") token_out = busd tokens = [str(token)", "= [token_in, token_out] else: path = [token_in,wbnb,token_out] elif chain.id ==", "137: ROUTERS = { \"quickswap\": Contract(\"0xa5E0829CaCEd8fFDD4De3c43696c57F7D7A678ff\") } FACTORIES = {", "balances[0] if balances[1] and not balances[0]: balances[0] = balances[1] return", "** ypricemagic.utils.utils.get_decimals_with_override(str(token)) for token in tokens] supply = supply /", "price1: try: price1 = ypricemagic.magic.get_price(tokens[1], block) except ypricemagic.magic.PriceError: price1 is", "`[token_in,weth,token_out]` swap path available. \"\"\" if chain.id == 56 and", "cake in (token_in, token_out): path = [token_in, token_out] else: path", "a price based on Uniswap Router quote for selling one", "one `token_in`. 
Always uses intermediate WETH pair if `[token_in,weth,token_out]` swap", "chain.id == 1: FACTORIES = { \"uniswap\": \"0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f\", \"sushiswap\": \"0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac\",", "10 ** ypricemagic.utils.utils.get_decimals_with_override(str(path[-1])) return amount_out / fees except ValueError as", "Contract(\"0xD9E1CE17F2641F24AE83637AB66A2CCA9C378B9F\"), } SPECIAL_PATHS = { \"sushiswap\": { \"0xEF69B5697f2Fb0345cC680210fD39b593a2f9684\": [\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\":", "prices = [price0,price1] scales = [10 ** ypricemagic.utils.utils.get_decimals_with_override(str(token)) for token", "SPECIAL_PATHS = { \"pancakeswapv2\": { }, \"pancakeswapv1\": { } }", "as e: return @ttl_cache(ttl=600) def get_price_v1(asset, block=None): factory = Contract(\"0xc0a47dFe034B400B47bDaD5FecDa2621de6c4d95\")", "\"quickswap\": \"0x5757371414417b8C6CAad45bAeF941aBc7d3Ab32\", } SPECIAL_PATHS = { \"quickswap\": { } }", "in STABLECOINS: path = SPECIAL_PATHS[router][str(token_in)] elif chain.id == 56: #bsc", "[pair, \"factory\"], [pair, \"token0\"], [pair, \"token1\"], [pair, \"totalSupply\"], [pair, \"getReserves\"],", "block) except ypricemagic.magic.PriceError: price1 is None prices = [price0,price1] balances", "{ } } FACTORY_TO_ROUTER = {FACTORIES[name]: ROUTERS[name] for name in", "= [price0,price1] balances = [None,None] # [res / scale *", "successfully. 
#project.load() if chain.id == 1: FACTORIES = { \"uniswap\":", "weth, sushi # NOTE: If this is failing to pull", "in zip(reserves, scales, prices)] except TypeError as e: # If", ",\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\": [\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\",wbtc,weth,usdc] }, \"uniswap\": { } } elif chain.id ==", "except ValueError as e: return @ttl_cache(ttl=600) def get_price_v1(asset, block=None): factory", "#project.load() if chain.id == 1: FACTORIES = { \"uniswap\": \"0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f\",", "path = [token_in, token_out] else: path = [token_in,wmatic,token_out] else: path", "= block) reserves = pair.getReserves(block_identifier = block) router = FACTORY_TO_PROTOCOL[factory]", "= SPECIAL_PATHS[router][str(token_in)] elif chain.id == 56: #bsc from .constants import", "= Contract(factory.getExchange(usdc)) usdc_bought = exchange.getEthToTokenInputPrice(eth_bought, block_identifier=block) / 1e6 fees =", "str(token_in) in SPECIAL_PATHS[router].keys() and str(token_out) in STABLECOINS: path = SPECIAL_PATHS[router][str(token_in)]", "import STABLECOINS, dai, usdc, usdt, wbtc, weth, sushi # NOTE:", "on bsc or poly factory, token0, token1, supply, reserves =", "} } elif chain.id == 137: ROUTERS = { \"quickswap\":", "elif cake in (token_in, token_out): path = [token_in, token_out] else:", "balances[1] return balances pair = Contract(address) if chain.id not in", "block=block) price1 = get_price(tokens[1], paired_against=tokens[0], router=router, block=block) prices = [price0,price1]", "return usdc_bought / fees except (ContractNotFound, ValueError) as e: pass", "requires a special swap path. # Please add a viable", "\"token1\"], [pair, \"totalSupply\"], [pair, \"getReserves\"], block=block ) else: factory =", "pair if `[token_in,weth,token_out]` swap path available. 
\"\"\" if chain.id ==", "router = FACTORY_TO_PROTOCOL[factory] tokens = [ypricemagic.utils.utils.Contract_with_erc20_fallback(token) for token in [token0,", "= Contract(factory.getExchange(asset)) eth_bought = exchange.getTokenToEthInputPrice(10 ** ypricemagic.utils.utils.get_decimals_with_override(asset), block_identifier=block) exchange =", "ROUTERS = { \"uniswap\": Contract(\"0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D\"), \"sushiswap\": Contract(\"0xD9E1CE17F2641F24AE83637AB66A2CCA9C378B9F\"), } SPECIAL_PATHS =", "multicall2 on bsc or poly factory, token0, token1, supply, reserves", "= block) supply = pair.totalSupply(block_identifier = block) reserves = pair.getReserves(block_identifier", "# Please add a viable swap path below to fetch", "}, \"uniswap\": { } } elif chain.id == 56: ROUTERS", "scale, price in zip(reserves, scales, prices)] if price0: balances[0] =", "ttl_cache from .utils.cache import memory from .utils.multicall2 import fetch_multicall from", "= block) token0 = pair.token0(block_identifier = block) token1 = pair.token1(block_identifier", "usdc] ,\"0x383518188C0C6d7730D91b2c03a03C837814a899\": [\"0x383518188C0C6d7730D91b2c03a03C837814a899\",dai] ,\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\": [\"0xafcE9B78D409bF74980CACF610AFB851BF02F257\",wbtc,weth,usdc] }, \"uniswap\": { } }", "on Uniswap Router quote for selling one `token_in`. 
Always uses", "ypricemagic.utils.utils.get_decimals_with_override(asset), block_identifier=block) exchange = Contract(factory.getExchange(usdc)) usdc_bought = exchange.getEthToTokenInputPrice(eth_bought, block_identifier=block) /", "from .constants import wmatic if wmatic in (token_in, token_out): path", "= [token_in, paired_against] elif weth in (token_in, token_out): path =", "ValueError) as e: pass @memory.cache() def is_uniswap_pool(address): try: return Contract(address).factory()", "str(token_in) in STABLECOINS: return 1 elif str(paired_against) in STABLECOINS and", "@ttl_cache(ttl=600) def lp_price(address, block=None): \"\"\" Get Uniswap/Sushiswap LP token price.", "router=router, block=block) price1 = get_price(tokens[1], paired_against=tokens[0], router=router, block=block) prices =", "[\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\",weth,usdc] ,\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\": [\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\",usdt] ,\"0x85034b3b2e292493D029443455Cc62ab669573B3\": [\"0x85034b3b2e292493D029443455Cc62ab669573B3\",\"0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984\",weth,usdc] ,\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\": [\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\", usdc] ,\"0x383518188C0C6d7730D91b2c03a03C837814a899\": [\"0x383518188C0C6d7730D91b2c03a03C837814a899\",dai]", "STABLECOINS: return 1 elif str(paired_against) in STABLECOINS and str(token_out) in", "a token you need, it's likely because that token requires", "= supply / 1e18 try: balances = [res / scale", "import memory from .utils.multicall2 import fetch_multicall from .interfaces.ERC20 import ERC20ABI", "balances[1]: balances[1] = balances[0] if balances[1] and not balances[0]: balances[0]", "usdt, wbtc, weth, sushi # NOTE: If this is failing", "price. 
\"\"\" def extrapolate_balance_if_needed(): nonlocal balances if balances[0] and not", "amount_in = 10 ** ypricemagic.utils.utils.get_decimals_with_override(tokens[0]) if str(token_in) in STABLECOINS: return", "return False @ttl_cache(ttl=600) def lp_price(address, block=None): \"\"\" Get Uniswap/Sushiswap LP", "token_out] fees = 0.997 ** (len(path) - 1) if router", "Always uses intermediate WETH pair if `[token_in,weth,token_out]` swap path available.", "special swap path. # Please add a viable swap path", "\"uniswap\": { } } elif chain.id == 56: ROUTERS =", "get_price_v1(asset, block=None): factory = Contract(\"0xc0a47dFe034B400B47bDaD5FecDa2621de6c4d95\") try: exchange = Contract(factory.getExchange(asset)) eth_bought", "= { \"quickswap\": \"0x5757371414417b8C6CAad45bAeF941aBc7d3Ab32\", } SPECIAL_PATHS = { \"quickswap\": {", "try: balances = [res / scale * price for res,", "price1 = ypricemagic.magic.get_price(tokens[1], block) except ypricemagic.magic.PriceError: price1 is None prices", "supply = pair.totalSupply(block_identifier = block) reserves = pair.getReserves(block_identifier = block)", "in zip(reserves, scales, prices)] if price0: balances[0] = reserves[0] /", "a viable swap path below to fetch price data successfully.", "== sushi and token_out != sushi: path = [token_in,sushi,weth,token_out] elif", "tokenize from brownie import Contract, chain from brownie.exceptions import ContractNotFound", "56 and token_out == usdc: busd = Contract(\"0xe9e7CEA3DedcA5984780Bafc599bD69ADd087D56\") token_out =", "token_out == usdc: busd = Contract(\"0xe9e7CEA3DedcA5984780Bafc599bD69ADd087D56\") token_out = busd tokens", "** 2 return usdc_bought / fees except (ContractNotFound, ValueError) as", "exchange = Contract(factory.getExchange(usdc)) usdc_bought = exchange.getEthToTokenInputPrice(eth_bought, block_identifier=block) / 1e6 fees", "NOTE: If this is failing to pull a price for", "scale * price for res, scale, price in zip(reserves, scales,", "uses intermediate WETH 
pair if `[token_in,weth,token_out]` swap path available. \"\"\"", "\"\"\" Get Uniswap/Sushiswap LP token price. \"\"\" def extrapolate_balance_if_needed(): nonlocal", "to fetch price data successfully. #project.load() if chain.id == 1:", "STABLECOINS, dai, usdc, usdt, wbtc, weth, sushi # NOTE: If", "and token_out == usdc: busd = Contract(\"0xe9e7CEA3DedcA5984780Bafc599bD69ADd087D56\") token_out = busd", "[\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\",\"<KEY>\",usdc] ,\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\": [\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\",\"0x87F5F9eBE40786D49D35E1B5997b07cCAA8ADbFF\",weth,usdc] ,\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\": [\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\",usdc] ,\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\": [\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\",\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\": [\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\",\"0xba100000625a3754423978a60c9317c58a424e3D\",weth,usdc] ,\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\":", "pair.token0(block_identifier = block) token1 = pair.token1(block_identifier = block) supply =", "import Contract, chain from brownie.exceptions import ContractNotFound from cachetools.func import", "= pair.token0(block_identifier = block) token1 = pair.token1(block_identifier = block) supply", "= reserves[0] / scales[0] * price0 if price1: balances[1] =", "chain.id not in [56, 137]: # No multicall2 on bsc", "add a viable swap path below to fetch price data", "price based on Uniswap Router quote for selling one `token_in`.", "elif chain.id == 56: ROUTERS = { \"pancakeswapv2\": Contract(\"0x10ED43C718714eb63d5aA57B78B54704E256024E\"), \"pancakeswapv1\":", "viable swap path below to fetch price data successfully. 
#project.load()", "elif str(paired_against) in STABLECOINS and str(token_out) in STABLECOINS: path =", "to pull a price for a token you need, it's", "} FACTORIES = { \"quickswap\": \"0x5757371414417b8C6CAad45bAeF941aBc7d3Ab32\", } SPECIAL_PATHS = {", "selling one `token_in`. Always uses intermediate WETH pair if `[token_in,weth,token_out]`", "in SPECIAL_PATHS[router].keys() and str(token_out) in STABLECOINS: path = SPECIAL_PATHS[router][str(token_in)] elif", "balances pair = Contract(address) if chain.id not in [56, 137]:", "path = [token_in,sushi,weth,token_out] elif str(token_in) in SPECIAL_PATHS[router].keys() and str(token_out) in", "balances[0]: balances[0] = balances[1] return balances pair = Contract(address) if", "= {FACTORIES[name]: ROUTERS[name] for name in FACTORIES} FACTORY_TO_PROTOCOL = {FACTORIES[name]:", "prices = [price0,price1] balances = [None,None] # [res / scale", "fees = 0.997 ** 2 return usdc_bought / fees except", ",\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\": [\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\",\"<KEY>\",usdc] ,\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\": [\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\",\"0x87F5F9eBE40786D49D35E1B5997b07cCAA8ADbFF\",weth,usdc] ,\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\": [\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\",usdc] ,\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\": [\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\",\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\": [\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\",\"0xba100000625a3754423978a60c9317c58a424e3D\",weth,usdc]", "\"0xBCfCcbde45cE874adCB698cC183deBcF17952812\" } SPECIAL_PATHS = { \"pancakeswapv2\": { }, \"pancakeswapv1\": {", "not balances[1]: balances[1] = balances[0] if balances[1] and not balances[0]:", "137]: # No multicall2 on bsc or poly factory, token0,", "** ypricemagic.utils.utils.get_decimals_with_override(tokens[0]) if str(token_in) in STABLECOINS: 
return 1 elif str(paired_against)", "# NOTE: If this is failing to pull a price", "not in [56, 137]: # No multicall2 on bsc or", "scales[1] * price1 balances = extrapolate_balance_if_needed() try: return sum(balances) /", "= 10 ** ypricemagic.utils.utils.get_decimals_with_override(tokens[0]) if str(token_in) in STABLECOINS: return 1", "import ypricemagic.magic import ypricemagic.utils.utils from .constants import STABLECOINS, dai, usdc,", "a price for a token you need, it's likely because", "name in FACTORIES} @ttl_cache(ttl=36000) def get_price(token_in, token_out=usdc, router=\"uniswap\", block=None, paired_against=weth):", "[\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\",\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\": [\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\",\"0xba100000625a3754423978a60c9317c58a424e3D\",weth,usdc] ,\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\": [\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\",weth,usdc] ,\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\": [\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\",usdt] ,\"0x85034b3b2e292493D029443455Cc62ab669573B3\": [\"0x85034b3b2e292493D029443455Cc62ab669573B3\",\"0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984\",weth,usdc] ,\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\":", "== 56 and token_out == usdc: busd = Contract(\"0xe9e7CEA3DedcA5984780Bafc599bD69ADd087D56\") token_out", "not balances[0]: balances[0] = balances[1] return balances pair = Contract(address)", "reserves[0] / scales[0] * price0 if price1: balances[1] = reserves[1]", "get_price(tokens[1], paired_against=tokens[0], router=router, block=block) prices = [price0,price1] scales = [10", ".interfaces.ERC20 import ERC20ABI import ypricemagic.magic import ypricemagic.utils.utils from .constants import", "token_out = busd tokens = [str(token) for token in [token_in,", "for token in [token0, token1]] price0 = get_price(tokens[0], paired_against=tokens[1], router=router,", "{ } } 
elif chain.id == 137: ROUTERS = {", "0.997 ** 2 return usdc_bought / fees except (ContractNotFound, ValueError)", "price1 = get_price(tokens[1], paired_against=tokens[0], router=router, block=block) prices = [price0,price1] scales", "== 56: ROUTERS = { \"pancakeswapv2\": Contract(\"0x10ED43C718714eb63d5aA57B78B54704E256024E\"), \"pancakeswapv1\": Contract(\"0x05fF2B0DB69458A0750badebc4f9e13aDd608C7F\") }", "= { \"pancakeswapv2\": Contract(\"0x10ED43C718714eb63d5aA57B78B54704E256024E\"), \"pancakeswapv1\": Contract(\"0x05fF2B0DB69458A0750badebc4f9e13aDd608C7F\") } FACTORIES = {", "= {FACTORIES[name]: name for name in FACTORIES} @ttl_cache(ttl=36000) def get_price(token_in,", "is_uniswap_pool(address): try: return Contract(address).factory() in FACTORY_TO_ROUTER except (ValueError, OverflowError, AttributeError):", "Contract(address).factory() in FACTORY_TO_ROUTER except (ValueError, OverflowError, AttributeError): pass return False", "== 137: #bsc from .constants import wmatic if wmatic in", "= { \"pancakeswapv2\": \"0xcA143Ce32Fe78f1f7019d7d551a6402fC5350c73\", \"pancakeswapv1\": \"0xBCfCcbde45cE874adCB698cC183deBcF17952812\" } SPECIAL_PATHS = {", "paired_against=tokens[1], router=router, block=block) price1 = get_price(tokens[1], paired_against=tokens[0], router=router, block=block) prices", "is failing to pull a price for a token you", "quote = router.getAmountsOut(amount_in, path, block_identifier=block) amount_out = quote[-1] / 10", "# If can't get price via router, try to get", "= [token_in,sushi,weth,token_out] elif str(token_in) in SPECIAL_PATHS[router].keys() and str(token_out) in STABLECOINS:", "** (len(path) - 1) if router in ROUTERS: router =", "nonlocal balances if balances[0] and not balances[1]: balances[1] = balances[0]", "Contract(address) if chain.id not in [56, 137]: # No multicall2", "FACTORY_TO_PROTOCOL = {FACTORIES[name]: name for name in FACTORIES} @ttl_cache(ttl=36000) def", "block=None): factory = 
Contract(\"0xc0a47dFe034B400B47bDaD5FecDa2621de6c4d95\") try: exchange = Contract(factory.getExchange(asset)) eth_bought =", "} ROUTERS = { \"uniswap\": Contract(\"0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D\"), \"sushiswap\": Contract(\"0xD9E1CE17F2641F24AE83637AB66A2CCA9C378B9F\"), } SPECIAL_PATHS", "{ \"uniswap\": Contract(\"0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D\"), \"sushiswap\": Contract(\"0xD9E1CE17F2641F24AE83637AB66A2CCA9C378B9F\"), } SPECIAL_PATHS = { \"sushiswap\":", "import ContractNotFound from cachetools.func import ttl_cache from .utils.cache import memory", "} FACTORIES = { \"pancakeswapv2\": \"0xcA143Ce32Fe78f1f7019d7d551a6402fC5350c73\", \"pancakeswapv1\": \"0xBCfCcbde45cE874adCB698cC183deBcF17952812\" } SPECIAL_PATHS", "(len(path) - 1) if router in ROUTERS: router = ROUTERS[router]", "[pair, \"token0\"], [pair, \"token1\"], [pair, \"totalSupply\"], [pair, \"getReserves\"], block=block )", "it's likely because that token requires a special swap path.", "router.getAmountsOut(amount_in, path, block_identifier=block) amount_out = quote[-1] / 10 ** ypricemagic.utils.utils.get_decimals_with_override(str(path[-1]))", "reserves = pair.getReserves(block_identifier = block) router = FACTORY_TO_PROTOCOL[factory] tokens =", "[price0,price1] scales = [10 ** ypricemagic.utils.utils.get_decimals_with_override(str(token)) for token in tokens]", "poly factory, token0, token1, supply, reserves = fetch_multicall( [pair, \"factory\"],", "a special swap path. 
# Please add a viable swap", "return 1 elif str(paired_against) in STABLECOINS and str(token_out) in STABLECOINS:", "res, scale, price in zip(reserves, scales, prices)] except TypeError as", "{ \"uniswap\": \"0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f\", \"sushiswap\": \"0xC0AEe478e3658e2610c5F7A4A2E1777cE9e4f2Ac\", } ROUTERS = { \"uniswap\":", "{FACTORIES[name]: ROUTERS[name] for name in FACTORIES} FACTORY_TO_PROTOCOL = {FACTORIES[name]: name", "and str(token_out) in STABLECOINS: path = [token_in, paired_against] elif weth", "= reserves[1] / scales[1] * price1 balances = extrapolate_balance_if_needed() try:", "/ scales[0] * price0 if price1: balances[1] = reserves[1] /", "{ \"pancakeswapv2\": Contract(\"0x10ED43C718714eb63d5aA57B78B54704E256024E\"), \"pancakeswapv1\": Contract(\"0x05fF2B0DB69458A0750badebc4f9e13aDd608C7F\") } FACTORIES = { \"pancakeswapv2\":", "\"pancakeswapv1\": \"0xBCfCcbde45cE874adCB698cC183deBcF17952812\" } SPECIAL_PATHS = { \"pancakeswapv2\": { }, \"pancakeswapv1\":", "try: return Contract(address).factory() in FACTORY_TO_ROUTER except (ValueError, OverflowError, AttributeError): pass", "get price via router, try to get from elsewhere if", "paired_against=weth): \"\"\" Calculate a price based on Uniswap Router quote", "for token in [token_in, token_out]] amount_in = 10 ** ypricemagic.utils.utils.get_decimals_with_override(tokens[0])", "(token_in, token_out): path = [token_in, token_out] elif cake in (token_in,", "= FACTORY_TO_PROTOCOL[factory] tokens = [ypricemagic.utils.utils.Contract_with_erc20_fallback(token) for token in [token0, token1]]", "in ROUTERS: router = ROUTERS[router] try: quote = router.getAmountsOut(amount_in, path,", "can't get price via router, try to get from elsewhere", "balances = [None,None] # [res / scale * price for", "token0 = pair.token0(block_identifier = block) token1 = pair.token1(block_identifier = block)", "Get Uniswap/Sushiswap LP token price. 
\"\"\" def extrapolate_balance_if_needed(): nonlocal balances", "swap path below to fetch price data successfully. #project.load() if", "wbnb in (token_in, token_out): path = [token_in, token_out] elif cake", "[token_in, paired_against] elif weth in (token_in, token_out): path = [token_in,", "import ERC20ABI import ypricemagic.magic import ypricemagic.utils.utils from .constants import STABLECOINS,", "{ \"pancakeswapv2\": \"0xcA143Ce32Fe78f1f7019d7d551a6402fC5350c73\", \"pancakeswapv1\": \"0xBCfCcbde45cE874adCB698cC183deBcF17952812\" } SPECIAL_PATHS = { \"pancakeswapv2\":", ",\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\": [\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\",usdc] ,\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\": [\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\",\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\": [\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\",\"0xba100000625a3754423978a60c9317c58a424e3D\",weth,usdc] ,\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\": [\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\",weth,usdc] ,\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\": [\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\",usdt]", ".constants import cake, wbnb if wbnb in (token_in, token_out): path", "None if not price1: try: price1 = ypricemagic.magic.get_price(tokens[1], block) except", "[\"<KEY>\",\"0x6B3595068778DD592e39A122f4f5a5cF09C90fE2\",weth,usdc] ,\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\": [\"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e\",\"0xC28E27870558cF22ADD83540d2126da2e4b464c2\",weth,usdc] ,\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\": [\"0x3166C570935a7D8554c8f4eA792ff965D2EFe1f2\",\"<KEY>\",usdc] ,\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\": [\"0xE6279E1c65DD41b30bA3760DCaC3CD8bbb4420D6\",\"0x87F5F9eBE40786D49D35E1B5997b07cCAA8ADbFF\",weth,usdc] ,\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\": [\"0x4954Db6391F4feB5468b6B943D4935353596aEC9\",usdc] 
,\"0x1E18821E69B9FAA8e6e75DFFe54E7E25754beDa0\":", "\"totalSupply\"], [pair, \"getReserves\"], block=block ) else: factory = pair.factory(block_identifier =", ",\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\": [\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\",weth,usdc] ,\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\": [\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\",usdt] ,\"0x85034b3b2e292493D029443455Cc62ab669573B3\": [\"0x85034b3b2e292493D029443455Cc62ab669573B3\",\"0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984\",weth,usdc] ,\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\": [\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\", usdc] ,\"0x383518188C0C6d7730D91b2c03a03C837814a899\":", "try: price1 = ypricemagic.magic.get_price(tokens[1], block) except ypricemagic.magic.PriceError: price1 is None", "import tokenize from brownie import Contract, chain from brownie.exceptions import", "WETH pair if `[token_in,weth,token_out]` swap path available. \"\"\" if chain.id", "balances[0] = balances[1] return balances pair = Contract(address) if chain.id", "factory = Contract(\"0xc0a47dFe034B400B47bDaD5FecDa2621de6c4d95\") try: exchange = Contract(factory.getExchange(asset)) eth_bought = exchange.getTokenToEthInputPrice(10", "weth in (token_in, token_out): path = [token_in, token_out] elif paired_against", "block=None, paired_against=weth): \"\"\" Calculate a price based on Uniswap Router", "zip(reserves, scales, prices)] if price0: balances[0] = reserves[0] / scales[0]", "token0, token1, supply, reserves = fetch_multicall( [pair, \"factory\"], [pair, \"token0\"],", "[\"0xfC1E690f61EFd961294b3e1Ce3313fBD8aa4f85d\",\"0xba100000625a3754423978a60c9317c58a424e3D\",weth,usdc] ,\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\": [\"0xBA50933C268F567BDC86E1aC131BE072C6B0b71a\",weth,usdc] ,\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\": [\"0x6102407f07029892eB5Ff02164ADFaFb85f4d222\",usdt] ,\"0x85034b3b2e292493D029443455Cc62ab669573B3\": 
[\"0x85034b3b2e292493D029443455Cc62ab669573B3\",\"0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984\",weth,usdc] ,\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\": [\"0xb220D53F7D0f52897Bcf25E47c4c3DC0bac344F8\", usdc]", "in STABLECOINS and str(token_out) in STABLECOINS: path = [token_in, paired_against]", "cake, wbnb if wbnb in (token_in, token_out): path = [token_in,", "[56, 137]: # No multicall2 on bsc or poly factory,", "chain.id == 137: ROUTERS = { \"quickswap\": Contract(\"0xa5E0829CaCEd8fFDD4De3c43696c57F7D7A678ff\") } FACTORIES", "if chain.id not in [56, 137]: # No multicall2 on", "= router.getAmountsOut(amount_in, path, block_identifier=block) amount_out = quote[-1] / 10 **", "10 ** ypricemagic.utils.utils.get_decimals_with_override(tokens[0]) if str(token_in) in STABLECOINS: return 1 elif" ]
[ "model configuration \"\"\" class TextRNNConfig(object): def __init__( self, vocab_size=30000, pretrained_embedding=None,", "embedding_matrix self.embedding_dim = embedding_dim self.embedding_dropout = embedding_dropout self.lstm_hidden_size = lstm_hidden_size", "\"\"\" class TextRNNConfig(object): def __init__( self, vocab_size=30000, pretrained_embedding=None, embedding_matrix=None, embedding_dim=300,", "= embedding_matrix self.embedding_dim = embedding_dim self.embedding_dropout = embedding_dropout self.lstm_hidden_size =", "TextRNNConfig(object): def __init__( self, vocab_size=30000, pretrained_embedding=None, embedding_matrix=None, embedding_dim=300, embedding_dropout=0.3, lstm_hidden_size=128,", "= embedding_dim self.embedding_dropout = embedding_dropout self.lstm_hidden_size = lstm_hidden_size self.output_dim =", "embedding_matrix=None, embedding_dim=300, embedding_dropout=0.3, lstm_hidden_size=128, output_dim=1, **kwargs ): self.pretrained_embedding = pretrained_embedding", "embedding_dim self.embedding_dropout = embedding_dropout self.lstm_hidden_size = lstm_hidden_size self.output_dim = output_dim", "__init__( self, vocab_size=30000, pretrained_embedding=None, embedding_matrix=None, embedding_dim=300, embedding_dropout=0.3, lstm_hidden_size=128, output_dim=1, **kwargs", "embedding_dropout=0.3, lstm_hidden_size=128, output_dim=1, **kwargs ): self.pretrained_embedding = pretrained_embedding self.embedding_matrix =", "self.pretrained_embedding = pretrained_embedding self.embedding_matrix = embedding_matrix self.embedding_dim = embedding_dim self.embedding_dropout", "= pretrained_embedding self.embedding_matrix = embedding_matrix self.embedding_dim = embedding_dim self.embedding_dropout =", "self, vocab_size=30000, pretrained_embedding=None, embedding_matrix=None, embedding_dim=300, embedding_dropout=0.3, lstm_hidden_size=128, output_dim=1, **kwargs ):", "vocab_size=30000, pretrained_embedding=None, embedding_matrix=None, embedding_dim=300, 
embedding_dropout=0.3, lstm_hidden_size=128, output_dim=1, **kwargs ): self.pretrained_embedding", "self.embedding_dim = embedding_dim self.embedding_dropout = embedding_dropout self.lstm_hidden_size = lstm_hidden_size self.output_dim", "\"\"\" TextRNN model configuration \"\"\" class TextRNNConfig(object): def __init__( self,", "output_dim=1, **kwargs ): self.pretrained_embedding = pretrained_embedding self.embedding_matrix = embedding_matrix self.embedding_dim", "embedding_dim=300, embedding_dropout=0.3, lstm_hidden_size=128, output_dim=1, **kwargs ): self.pretrained_embedding = pretrained_embedding self.embedding_matrix", "pretrained_embedding=None, embedding_matrix=None, embedding_dim=300, embedding_dropout=0.3, lstm_hidden_size=128, output_dim=1, **kwargs ): self.pretrained_embedding =", "configuration \"\"\" class TextRNNConfig(object): def __init__( self, vocab_size=30000, pretrained_embedding=None, embedding_matrix=None,", "pretrained_embedding self.embedding_matrix = embedding_matrix self.embedding_dim = embedding_dim self.embedding_dropout = embedding_dropout", "lstm_hidden_size=128, output_dim=1, **kwargs ): self.pretrained_embedding = pretrained_embedding self.embedding_matrix = embedding_matrix", "class TextRNNConfig(object): def __init__( self, vocab_size=30000, pretrained_embedding=None, embedding_matrix=None, embedding_dim=300, embedding_dropout=0.3,", "): self.pretrained_embedding = pretrained_embedding self.embedding_matrix = embedding_matrix self.embedding_dim = embedding_dim", "**kwargs ): self.pretrained_embedding = pretrained_embedding self.embedding_matrix = embedding_matrix self.embedding_dim =", "self.embedding_matrix = embedding_matrix self.embedding_dim = embedding_dim self.embedding_dropout = embedding_dropout self.lstm_hidden_size", "TextRNN model configuration \"\"\" class TextRNNConfig(object): def __init__( self, vocab_size=30000,", "def __init__( self, vocab_size=30000, pretrained_embedding=None, embedding_matrix=None, 
embedding_dim=300, embedding_dropout=0.3, lstm_hidden_size=128, output_dim=1," ]
[]
[ "the ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Gen_QueryAndGroupXYZ, self).__init__()", "return output @staticmethod def backward(ctx, grad_out): idx, c, n =", "m, unknown, known, dist2, idx) return torch.sqrt(dist2), idx @staticmethod def", "if new_xyz is None: new_xyz = xyz if idx is", "label_stat: (b, n, nclass) :param idx: (b, m, nsample) :return:", "dist2 = torch.cuda.FloatTensor(b, n, 3) idx = torch.cuda.IntTensor(b, n, 3)", "Gathering(Function): @staticmethod def forward(ctx, features, idx): \"\"\" input: features: (b,", "radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup_OnlyFeature, self).__init__() self.radius, self.nsample, self.use_xyz = radius,", "assert label_stat.is_contiguous() assert idx.is_contiguous() b, n, nclass = label_stat.size() m", ":param ctx: :param max_xyz: (b, n, 3) :param xyz: (b,", "= grouped_xyz return grouped_xyz, new_features class Gen_QueryAndGroupXYZ(nn.Module): \"\"\" Groups with", "= grouping(xyz_trans, idx) # (b, 3, m, nsample) return grouped_xyz", "= xyz.transpose(1, 2).contiguous() # BxNx3 -> Bx3xN grouped_xyz = grouping(xyz_trans,", "(b, m, nsample) containing the indicies of features to group", "def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor = None) -> torch.Tensor:", "torch.Tensor) -> torch.Tensor: \"\"\" Performs weight linear interpolation on 3", "m, nsample) pointops_cuda.grouping_int_forward_cuda(b, c, n, m, nsample, features, idx, output)", "= xyz.repeat(1, m, 1).view(b, m * n, 3) # dist", "nsample) idx = knnquery(2*self.nsample, xyz, new_xyz) # (b, m, nsample)", "return new_label_stat, idx @staticmethod def backward(ctx, a=None, b=None): return None,", "(b, n, 3) xyz coordinates of the features new_xyz: torch.Tensor,", "def __init__(self, radius=None, nsample=32, use_xyz=True): super(Gen_QueryAndGroupXYZ, self).__init__() self.radius, self.nsample, self.use_xyz", "m = known.size(1) dist2 = torch.cuda.FloatTensor(b, n, 3) idx =", "GroupingInt(Function): 
@staticmethod def forward(ctx, features: torch.Tensor, idx: torch.Tensor) -> torch.Tensor:", "return new_features class GroupAll(nn.Module): \"\"\" Groups all features \"\"\" def", "features.is_contiguous() assert idx.is_contiguous() assert weight.is_contiguous() b, c, m = features.size()", "self.use_xyz, \"Cannot have not features and not use xyz as", "= torch.zeros(b, m, nsample).int().cuda() for i in range(b): dist =", "= torch.cat([grouped_xyz, grouped_features], dim=1) # (b, c+3, 1, n) else:", "class Gathering(Function): @staticmethod def forward(ctx, features, idx): \"\"\" input: features:", "torch.Tensor: \"\"\" input: radius: float, radius of the balls nsample:", "-> torch.Tensor: ''' :param ctx: :param nsample: :param label_stat: (b,", "''' idx = torch.zeros(b, m, nsample).int().cuda() for i in range(b):", "features, idx): \"\"\" input: features: (b, c, n), idx :", "nsample) #xyz_trans = xyz.transpose(1, 2).contiguous() #grouped_xyz = grouping(xyz_trans, idx) #", "( dist2: (b, m, nsample) ) \"\"\" if new_xyz is", "xyz if idx is None: if self.radius is not None:", "knnquery(self.nsample, xyz, new_xyz) # (b, m, nsample) #xyz_trans = xyz.transpose(1,", "# ''' # new_xyz_repeat = new_xyz.repeat(1, 1, n).view(b, m *", "#grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1) if features is not None: grouped_features", "import numpy as np return torch.clamp(dist, 0.0, np.inf) class KNNQueryNaive(Function):", "(b, n, 3) and n > m, m: int32 output:", "known.size(1) dist2 = torch.cuda.FloatTensor(b, n, 3) idx = torch.cuda.IntTensor(b, n,", "new_xyz = xyz #if idx is None: if self.radius is", "(b, m, nsample) \"\"\" assert xyz.size() == new_xyz.size() if new_xyz", "n).fill_(1e10) pointops_cuda.furthestsampling_cuda(b, n, m, xyz, temp, idx) return idx @staticmethod", "n, 3) # dist = (new_xyz_repeat - xyz_repeat).pow(2).sum(dim=2).view(b, m, n)", "None ballquery = BallQuery.apply class FeatureDistribute(Function): @staticmethod def forward(ctx, max_xyz:", 
"||x[i,:]-y[j,:]||^2 ''' x_norm = (x ** 2).sum(1).view(-1, 1) if y", "n, _ = xyz.size() m = new_xyz.size(1) idx = torch.cuda.IntTensor(b,", "None knnquery_naive = KNNQueryNaive.apply class KNNQuery(Function): @staticmethod def forward(ctx, nsample:", "known.is_contiguous() b, n, _ = unknown.size() m = known.size(1) dist2", "3), known: (b, m, 3) output: dist2: (b, n, 3)", "nsample) \"\"\" if new_xyz is None: new_xyz = xyz #if", "None gathering = Gathering.apply class NearestNeighbor(Function): @staticmethod def forward(ctx, unknown:", "(b, c+3, 1, n) else: new_features = grouped_features else: new_features", "\"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup_SameSize, self).__init__() self.radius, self.nsample,", "b, n, _ = xyz.size() idx = torch.cuda.IntTensor(b, m) temp", "maximum number of features in the balls xyz: torch.Tensor, (b,", "idx = knnquery(self.nsample, xyz, new_xyz) # (b, m, nsample) #xyz_trans", "torch.cuda.FloatTensor(b, c, n).zero_() grad_distribute_feature_data = grad_distribute_feature.data.contiguous() pointops_cuda.featuregather_backward_cuda(b, n, m, c,", "= torch.cuda.IntTensor(b, m).zero_() pointops_cuda.featuredistribute_cuda(b, n, m, max_xyz, xyz, distribute_idx) return", "new_features: (b, c+3, 1, N) tensor \"\"\" grouped_xyz = xyz.transpose(1,", "assert new_xyz.is_contiguous() b, m, _ = new_xyz.size() n = xyz.size(1)", "\"\"\" assert xyz.is_contiguous() b, n, _ = xyz.size() idx =", "radius: float, radius of the balls nsample: int, maximum number", "return new_label_stat @staticmethod def backward(ctx, a=None): return None, None, None,", "\"\"\" KNN Indexing input: nsample: int32, Number of neighbor xyz:", "# (b, c, m, nsample) else: new_features = grouped_features else:", "numpy as np class FurthestSampling(Function): @staticmethod def forward(ctx, xyz, m):", "xyz: torch.Tensor, new_xyz: torch.Tensor = None, features: torch.Tensor = None,", "= xyz assert xyz.is_contiguous() assert 
new_xyz.is_contiguous() b, m, _ =", "= torch.cuda.IntTensor(b, m, nclass).zero_() idx = torch.cuda.IntTensor(b, m, nsample).zero_() pointops_cuda.labelstat_and_ballquery_cuda(b,", "\"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(QueryAndGroup_Dilate, self).__init__() self.radius, self.nsample,", "KNNQueryExclude.apply class Le_QueryAndGroup_SameSize(nn.Module): \"\"\" Groups with a ball query of", "def backward(ctx, a=None, b=None): return None, None nearestneighbor = NearestNeighbor.apply", "xyz_trans = xyz.transpose(1, 2).contiguous() # BxNx3 -> Bx3xN grouped_xyz =", "radius: float32, Radius of ball nsample: int32, Maximum number of", "= torch.cuda.FloatTensor(b, c, n).zero_() grad_distribute_feature_data = grad_distribute_feature.data.contiguous() pointops_cuda.featuregather_backward_cuda(b, n, m,", "None: if self.radius is not None: idx = ballquery(self.radius, 2*self.nsample,", "torch.sort(dist, dim=1) idx[i, :, :] = idxs[:, 0:nsample] ''' #", "to gather in the ball \"\"\" def __init__(self, radius=None, nsample=32,", "= xyz.size() m = new_xyz.size(1) idx = torch.cuda.IntTensor(b, m, nsample).zero_()", "n = features.size() _, m, nsample = idx.size() output =", "-> torch.Tensor: ''' :param ctx: :param max_feature: (b, c, n)", "= Interpolation.apply class Grouping(Function): @staticmethod def forward(ctx, features: torch.Tensor, idx:", "(b, m, 3) :param label_stat: (b, n, nclass) :return: new_label_stat:", "nsample) tensor with the indicies of the features that form", "furthestsampling = FurthestSampling.apply class Gathering(Function): @staticmethod def forward(ctx, features, idx):", "a feature!\" new_features = grouped_xyz return new_features class GroupAll(nn.Module): \"\"\"", "xyz.repeat(1, m, 1).view(b, m * n, 3) # dist =", "m) :return: grad_max_feature: (b, c, n), None ''' distribute_idx, n", "nn from metrics.pointops import pointops_cuda import numpy as np class", "balls nsample: int, maximum number of features in the balls", 
"nclass).zero_() pointops_cuda.labelstat_ballrange_cuda(b, n, m, radius, nclass, new_xyz, xyz, label_stat, new_label_stat)", "tensor with the indicies of the features that form the", "''' # new_xyz_repeat = new_xyz.repeat(1, 1, n).view(b, m * n,", "ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup, self).__init__() self.radius,", "m) output = torch.cuda.FloatTensor(b, c, n) pointops_cuda.interpolation_forward_cuda(b, c, m, n,", "class Grouping(Function): @staticmethod def forward(ctx, features: torch.Tensor, idx: torch.Tensor) ->", "a NxM matrix where dist[i,j] is the square norm between", "0, 1) y_norm = x_norm.view(1, -1) dist = x_norm +", "forward(ctx, max_feature: torch.Tensor, distribute_idx: torch.Tensor) -> torch.Tensor: ''' :param ctx:", "ctx.for_backwards = (idx, n) return output @staticmethod def backward(ctx, grad_out:", "return grad_features, None grouping = Grouping.apply class GroupingInt(Function): @staticmethod def", "m, 3) :return: distribute_idx: (b, m) \"\"\" assert max_xyz.is_contiguous() assert", "xyz.transpose(1, 2).contiguous() # BxNx3 -> Bx3xN grouped_xyz = grouping(xyz_trans, idx)", "grad_out: (b, c, n) output: grad_features: (b, c, m), None,", "import pointops_cuda import numpy as np class FurthestSampling(Function): @staticmethod def", "(b, m, nclass) idx: (b, m, nsample) ''' assert xyz.is_contiguous()", "-> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: \"\"\" input: grad_out: (b, c, n)", "@staticmethod def forward(ctx, xyz, m): \"\"\" input: xyz: (b, n,", ":param distribute_idx: (b, m) :return: distribute_feature: (b, c, m) '''", "m): \"\"\" input: xyz: (b, n, 3) and n >", "None, None labelstat_and_ballquery = LabelStatAndBallQuery.apply def pairwise_distances(x, y=None): ''' Input:", "= torch.sort(dist, dim=1) idx[i, :, :] = idxs[:, 0:nsample] '''", "xyz.transpose(1, 2).unsqueeze(2) if features is not None: grouped_features = features.unsqueeze(2)", "features: torch.Tensor, idx: 
torch.Tensor, weight: torch.Tensor) -> torch.Tensor: \"\"\" Performs", "unknown: (b, n, 3), known: (b, m, 3) output: dist2:", "c, m) \"\"\" assert features.is_contiguous() assert idx.is_contiguous() b, c, n", "= torch.cuda.IntTensor(b, m, nclass).zero_() pointops_cuda.labelstat_ballrange_cuda(b, n, m, radius, nclass, new_xyz,", "torch.Tensor, torch.Tensor]: \"\"\" input: grad_out: (b, c, n) output: grad_features:", "FeatureGather.apply class LabelStatBallRange(Function): @staticmethod def forward(ctx, radius: float, xyz: torch.Tensor,", "# (b, m, nsample) idx2 = np.array([i for i in", "nsample) ( dist2: (b, m, nsample) ) \"\"\" if new_xyz", "return grouped_xyz, new_features class QueryAndGroup(nn.Module): \"\"\" Groups with a ball", "\"\"\" assert max_xyz.is_contiguous() assert xyz.is_contiguous() b, n, _ = max_xyz.size()", "of neighbor xyz: (b, n, 3) coordinates of the features", "Grouping(Function): @staticmethod def forward(ctx, features: torch.Tensor, idx: torch.Tensor) -> torch.Tensor:", "features: (b, c, n) descriptors of the features output: new_features:", "b, n, nclass = label_stat.size() m = new_xyz.size(1) new_label_stat =", "idx, output) ctx.for_backwards = (idx, c, n) return output @staticmethod", "class Le_QueryAndGroup_SameSize(nn.Module): \"\"\" Groups with a ball query of radius", "** 2).sum(1).view(-1, 1) if y is not None: y_t =", "backward(ctx, grad_out: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: \"\"\" input: grad_out:", "= xyz.size(1) distribute_idx = torch.cuda.IntTensor(b, m).zero_() pointops_cuda.featuredistribute_cuda(b, n, m, max_xyz,", "max_feature.size() m = distribute_idx.size(1) distribute_feature = torch.cuda.FloatTensor(b, c, m).zero_() pointops_cuda.featuregather_forward_cuda(b,", "(b, c, m, nsample) else: new_features = grouped_features else: assert", "= grouped_xyz return new_features class Le_QueryAndGroup(nn.Module): \"\"\" Groups with a", "of features in the balls xyz: torch.Tensor, (b, n, 3)", 
"torch.Tensor]: \"\"\" input: grad_out: (b, c, n) output: grad_features: (b,", "torch.sort(dist, dim=2) idx = idxs[:, :, 1:nsample+1].int() # ''' return", "n = xyz.size(1) ''' idx = torch.zeros(b, m, nsample).int().cuda() for", "m, nsample) #grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1) if features is not", "\"\"\" if new_xyz is None: new_xyz = xyz b, m,", "features in the balls xyz: torch.Tensor, (b, n, 3) xyz", "def forward(ctx, radius: float, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor)", "(b, m, nsample) ) \"\"\" if new_xyz is None: new_xyz", "output = torch.cuda.FloatTensor(b, c, n) pointops_cuda.interpolation_forward_cuda(b, c, m, n, features,", "@staticmethod def backward(ctx, a=None): return None, None, None, None ballquery", "n) output: new_features: (b, c+3, m, nsample) # grouped_idxs: (b,", "= idxs[:, 0:nsample] ''' # ''' # new_xyz_repeat = new_xyz.repeat(1,", "= distribute_idx.size(1) distribute_feature = torch.cuda.FloatTensor(b, c, m).zero_() pointops_cuda.featuregather_forward_cuda(b, n, m,", "_, m, nsample = idx.size() output = torch.cuda.LongTensor(b, c, m,", ":param ctx: :param radius: :param xyz: (b, n, 3) :param", "matrix where dist[i,j] is the square norm between x[i,:] and", "c, m) ''' assert max_feature.is_contiguous() assert distribute_idx.is_contiguous() b, c, n", "use_xyz=True): super(Gen_QueryAndGroupXYZ, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz", "None: new_xyz = xyz if idx is None: if self.radius", "pointops_cuda.grouping_backward_cuda(b, c, n, m, nsample, grad_out_data, idx, grad_features.data) return grad_features,", "None labelstat_ballrange = LabelStatBallRange.apply class LabelStatIdx(Function): @staticmethod def forward(ctx, nsample:", "idx = knnquery(self.nsample, xyz, new_xyz) # (b, m, nsample) xyz_trans", "centriods output: new_features: (b, m, nsample) \"\"\" if new_xyz is", "b, c, n = features.size() _, m, nsample = idx.size()", "_ = max_xyz.size() m = xyz.size(1) 
distribute_idx = torch.cuda.IntTensor(b, m).zero_()", "(b, m) \"\"\" assert max_xyz.is_contiguous() assert xyz.is_contiguous() b, n, _", "idx is None: if self.radius is not None: idx =", "= knnquery(self.nsample, xyz, new_xyz) # (b, m, nsample) #xyz_trans =", "m = new_xyz.size(1) new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() pointops_cuda.labelstat_ballrange_cuda(b, n,", "idx, weight, grad_features.data) return grad_features, None, None interpolation = Interpolation.apply", "idx: torch.Tensor) -> torch.Tensor: \"\"\" input: features: (b, c, n),", "nclass, new_xyz, xyz, label_stat, idx, new_label_stat) return new_label_stat, idx @staticmethod", "3, m, nsample) # grouped_idxs = grouping(idxs.unsqueeze(1).float(), idx).squeeze(1).int() # (b,", "def backward(ctx, grad_out): idx, c, n = ctx.for_backwards b, m", "class LabelStatIdx(Function): @staticmethod def forward(ctx, nsample: int, label_stat: torch.Tensor, idx:", "m, radius, nclass, new_xyz, xyz, label_stat, new_label_stat) return new_label_stat @staticmethod", "idx = ballquery(self.radius, self.nsample, xyz, new_xyz) else: idx = knnquery(self.nsample,", "if idx is None: if self.radius is not None: idx", "\"\"\" Find the three nearest neighbors of unknown in known", "descriptors of the features output: new_features: (b, c+3, 1, N)", "idx: (b, m, nsample) :return: new_label_stat: (b, m, nclass) '''", "nsample=32, use_xyz=True): super(Le_QueryAndGroup_SameSize, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample,", "use_xyz def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor = None, features:", "2).unsqueeze(-1) if features is not None: grouped_features = grouping(features, idx)", "idx2 = idx2[:self.nsample] idx = idx[:, :, idx2] xyz_trans =", "LabelStatAndBallQuery.apply def pairwise_distances(x, y=None): ''' Input: x is a Nxd", "max_xyz.size() m = xyz.size(1) distribute_idx = torch.cuda.IntTensor(b, m).zero_() pointops_cuda.featuredistribute_cuda(b, n,", ":], xyz[i, :, 
:]) [_, idxs] = torch.sort(dist, dim=1) idx[i,", "ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(QueryAndGroup, self).__init__() self.radius,", "n = ctx.for_backwards b, m = idx.size() grad_features = torch.cuda.FloatTensor(b,", "m, radius, nsample, new_xyz, xyz, idx) return idx @staticmethod def", "idx, grad_features.data) return grad_features, None grouping = Grouping.apply class GroupingInt(Function):", "assert self.use_xyz, \"Cannot have not features and not use xyz", "m, nsample) tensor with the indicies of the features that", "new_features = grouped_xyz return new_features class QueryAndGroup_Dilate(nn.Module): \"\"\" Groups with", "m, nsample, features, idx, output) ctx.for_backwards = (idx, n) return", "x_norm + y_norm - 2.0 * torch.mm(x, y_t) import numpy", "m = idx.size() grad_features = torch.cuda.FloatTensor(b, c, n).zero_() grad_out_data =", "nsample) :return: new_label_stat: (b, m, nclass) ''' assert label_stat.is_contiguous() assert", ":param label_stat: (b, n, nclass) :param idx: (b, m, nsample)", "grad_max_feature = torch.cuda.FloatTensor(b, c, n).zero_() grad_distribute_feature_data = grad_distribute_feature.data.contiguous() pointops_cuda.featuregather_backward_cuda(b, n,", "idx = torch.cuda.IntTensor(b, m, nsample).zero_() dist2 = torch.cuda.FloatTensor(b, m, nsample).zero_()", "(new_xyz.repeat(1, 1, n).view(b, m * n, 3) - xyz.repeat(1, m,", "metrics.pointops import pointops_cuda import numpy as np class FurthestSampling(Function): @staticmethod", "= FeatureGather.apply class LabelStatBallRange(Function): @staticmethod def forward(ctx, radius: float, xyz:", "backward(ctx, a=None): return None, None, None, None ballquery = BallQuery.apply", "nclass).zero_() idx = torch.cuda.IntTensor(b, m, nsample).zero_() pointops_cuda.labelstat_and_ballquery_cuda(b, n, m, radius,", "grouped_xyz = grouping(xyz_trans, idx) # (b, 3, m, nsample) return", "# (b, c, m, nsample) if self.use_xyz: #new_features = torch.cat([grouped_xyz,", 
"three nearest neighbors idx: (b, n, 3) index of 3", "radius, nsample, new_xyz, xyz, idx) return idx @staticmethod def backward(ctx,", "torch.Tensor, (b, n, 3) xyz coordinates of the features new_xyz:", "torch.Tensor, idx: torch.Tensor) -> torch.Tensor: \"\"\" input: features: (b, c,", "c, grad_distribute_feature_data, distribute_idx, grad_max_feature.data) return grad_max_feature, None featuregather = FeatureGather.apply", "backward(ctx): return None, None, None knnquery_exclude = KNNQueryExclude.apply class Le_QueryAndGroup_SameSize(nn.Module):", "m, nclass) ''' assert label_stat.is_contiguous() assert idx.is_contiguous() b, n, nclass", "idx: torch.Tensor = None) -> torch.Tensor: def forward(self, xyz: torch.Tensor,", "assert max_feature.is_contiguous() assert distribute_idx.is_contiguous() b, c, n = max_feature.size() m", "GroupingInt.apply class BallQuery(Function): @staticmethod def forward(ctx, radius: float, nsample: int,", "nclass) :return: new_label_stat: (b, m, nclass) idx: (b, m, nsample)", "def backward(xyz, a=None): return None, None furthestsampling = FurthestSampling.apply class", "grad_out_data = grad_out.data.contiguous() pointops_cuda.grouping_backward_cuda(b, c, n, m, nsample, grad_out_data, idx,", "forward(ctx, features, idx): \"\"\" input: features: (b, c, n), idx", "c, m).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.interpolation_backward_cuda(b, c, n, m, grad_out_data,", "xyz, new_xyz) else: idx = knnquery(self.nsample, xyz, new_xyz) # (b,", "c, n = grad_out.size() grad_features = torch.cuda.FloatTensor(b, c, m).zero_() grad_out_data", "m, nsample) \"\"\" assert xyz.size() == new_xyz.size() if new_xyz is", "else: # idx = knnquery_naive(self.nsample, xyz, new_xyz) # (b, m,", "idxs: (b, n) output: new_features: (b, c+3, m, nsample) #", "= idx.size(1) new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() pointops_cuda.labelstat_idx_cuda(b, n, m,", "= (distribute_idx, n) return distribute_feature @staticmethod def 
backward(ctx, grad_distribute_feature: torch.Tensor):", "new_xyz is None: new_xyz = xyz if idx is None:", "with a ball query of radius parameters: radius: float32, Radius", "\"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup, self).__init__() self.radius, self.nsample,", "grad_features, None grouping = Grouping.apply class GroupingInt(Function): @staticmethod def forward(ctx,", "3) - xyz.repeat(1, m, 1).view(b, m * n, 3)).pow(2).sum(dim=2).view(b, m,", "KNNQuery(Function): @staticmethod def forward(ctx, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor", "ctx: :param radius: :param nsample: :param xyz: (b, n, 3)", "features, idx, output) ctx.for_backwards = (idx, c, n) return output", "coordinates of the features new_xyz: ignored torch features: (b, c,", "distribute_feature: (b, c, m) ''' assert max_feature.is_contiguous() assert distribute_idx.is_contiguous() b,", "n, 3) coordinates of the features new_xyz: ignored torch features:", "forward(ctx, nsample: int, label_stat: torch.Tensor, idx: torch.Tensor) -> torch.Tensor: '''", "backward(ctx, a=None, b=None): return None, None nearestneighbor = NearestNeighbor.apply class", "def __init__(self, radius=None, nsample=32, use_xyz=True): super(QueryAndGroup, self).__init__() self.radius, self.nsample, self.use_xyz", "#grouped_xyz = grouping(xyz_trans, idx) # (b, 3, m, nsample) #", "grad_out_data = grad_out.data.contiguous() pointops_cuda.gathering_backward_cuda(b, c, n, m, grad_out_data, idx, grad_features.data)", "= xyz.size(1) ''' idx = torch.zeros(b, m, nsample).int().cuda() for i", "return None, None, None knnquery = KNNQuery.apply class KNNQueryExclude(Function): @staticmethod", "[_, idxs] = torch.sort(dist, dim=2) idx = idxs[:, :, 1:nsample+1].int()", "dist2 = torch.cuda.FloatTensor(b, m, nsample).zero_() pointops_cuda.knnquery_cuda(b, n, m, nsample, xyz,", "ignored torch features: (b, c, n) descriptors of the features", "(b, c, m) features descriptors to be interpolated 
from idx:", "pairwise_distances(x, y=None): ''' Input: x is a Nxd matrix y", "if self.use_xyz: #new_features = torch.cat([grouped_xyz, grouped_features], dim=1) # (b, c+3,", "3) centriods output: idx: (b, m, nsample) ( dist2: (b,", "m).zero_() pointops_cuda.featuregather_forward_cuda(b, n, m, c, max_feature, distribute_idx, distribute_feature) ctx.for_backwards =", "= (idx, weight, m) output = torch.cuda.FloatTensor(b, c, n) pointops_cuda.interpolation_forward_cuda(b,", "''' :param ctx: :param radius: :param nsample: :param xyz: (b,", "\"\"\" idx, n = ctx.for_backwards b, c, m, nsample =", "None furthestsampling = FurthestSampling.apply class Gathering(Function): @staticmethod def forward(ctx, features,", "unknown.size() m = known.size(1) dist2 = torch.cuda.FloatTensor(b, n, 3) idx", "label_stat: (b, n, nclass) :return: new_label_stat: (b, m, nclass) idx:", "m, nsample).int().cuda() for i in range(b): dist = pairwise_distances(new_xyz[i, :,", "else: new_features = grouped_features else: assert self.use_xyz, \"Cannot have not", "in the ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(QueryAndGroup,", "nsample) idx2 = np.array([i for i in range(2*self.nsample)]) np.random.shuffle(idx2) idx2", "__init__(self, use_xyz: bool = True): super(GroupAll, self).__init__() self.use_xyz = use_xyz", "dist[i,j] = ||x[i,:]-y[j,:]||^2 ''' x_norm = (x ** 2).sum(1).view(-1, 1)", "__init__(self, radius=None, nsample=32, use_xyz=True): super(QueryAndGroup, self).__init__() self.radius, self.nsample, self.use_xyz =", "ctx: :param max_xyz: (b, n, 3) :param xyz: (b, m,", "b, m, _ = new_xyz.size() n = xyz.size(1) idx =", "@staticmethod def backward(ctx): return None, None, None knnquery_naive = KNNQueryNaive.apply", "np.random.shuffle(idx2) idx2 = idx2[:self.nsample] idx = idx[:, :, idx2] xyz_trans", "Interpolation(Function): @staticmethod def forward(ctx, features: torch.Tensor, idx: torch.Tensor, weight: torch.Tensor)", "if self.radius is not None: idx = 
ballquery(self.radius, self.nsample, xyz,", "n = idx.size(1) ctx.interpolation_for_backward = (idx, weight, m) output =", "radius: float, xyz: torch.Tensor, new_xyz: torch.Tensor, label_stat: torch.Tensor) -> torch.Tensor:", "idx.is_contiguous() b, c, n = features.size() m = idx.size(1) output", "3) centriods output: idx: (b, m, nsample) \"\"\" if new_xyz", "in range(b): dist = pairwise_distances(new_xyz[i, :, :], xyz[i, :, :])", "nsample, grad_out_data, idx, grad_features.data) return grad_features, None grouping = Grouping.apply", "m = new_xyz.size(1) new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() idx =", "idx, dist2) return idx @staticmethod def backward(ctx, a=None): return None,", "# (b, m, nsample) #xyz_trans = xyz.transpose(1, 2).contiguous() #grouped_xyz =", "grad_distribute_feature.size() grad_max_feature = torch.cuda.FloatTensor(b, c, n).zero_() grad_distribute_feature_data = grad_distribute_feature.data.contiguous() pointops_cuda.featuregather_backward_cuda(b,", "distribute_idx = torch.cuda.IntTensor(b, m).zero_() pointops_cuda.featuredistribute_cuda(b, n, m, max_xyz, xyz, distribute_idx)", "* torch.mm(x, y_t) import numpy as np return torch.clamp(dist, 0.0,", "knnquery = KNNQuery.apply class KNNQueryExclude(Function): @staticmethod def forward(ctx, nsample: int,", "nsample: int, maximum number of features in the balls xyz:", "\"\"\" :param ctx: :param max_xyz: (b, n, 3) :param xyz:", "None nearestneighbor = NearestNeighbor.apply class Interpolation(Function): @staticmethod def forward(ctx, features:", "= torch.cuda.FloatTensor(b, n, 3) idx = torch.cuda.IntTensor(b, n, 3) pointops_cuda.nearestneighbor_cuda(b,", "(new_xyz_repeat - xyz_repeat).pow(2).sum(dim=2).view(b, m, n) dist = (new_xyz.repeat(1, 1, n).view(b,", "\"\"\" if new_xyz is None: new_xyz = xyz assert xyz.is_contiguous()", "= torch.sort(dist, dim=2) idx = idxs[:, :, 0:nsample].int() # '''", "idx).squeeze(1).int() # (b, m, nsample) #grouped_xyz -= new_xyz.transpose(1, 
2).unsqueeze(-1) if", "n) idx: idx of neighbors # idxs: (b, n) output:", ":param ctx: :param grad_distribute_feature: (b, c, m) :return: grad_max_feature: (b,", "\"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Gen_QueryAndGroupXYZ, self).__init__() self.radius, self.nsample,", "xyz assert xyz.is_contiguous() assert new_xyz.is_contiguous() b, m, _ = new_xyz.size()", "idx.size() output = torch.cuda.LongTensor(b, c, m, nsample) pointops_cuda.grouping_int_forward_cuda(b, c, n,", "(b, c+3, 1, N) tensor \"\"\" grouped_xyz = xyz.transpose(1, 2).unsqueeze(2)", "nearest neighbors idx: (b, n, 3) index of 3 nearest", "grouped_xyz return new_features class QueryAndGroup_Dilate(nn.Module): \"\"\" Groups with a ball", "= idx.size() output = torch.cuda.FloatTensor(b, c, m, nsample) pointops_cuda.grouping_forward_cuda(b, c,", "max_xyz: torch.Tensor, xyz: torch.Tensor) -> torch.Tensor: \"\"\" :param ctx: :param", "ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup_OnlyFeature, self).__init__() self.radius,", "idx: torch.Tensor = None) -> torch.Tensor: \"\"\" input: xyz: (b,", "is not None: grouped_features = grouping(features, idx) if self.use_xyz: new_features", "grouping(idxs.unsqueeze(1).float(), idx).squeeze(1).int() # (b, m, nsample) grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1)", "class FeatureGather(Function): @staticmethod def forward(ctx, max_feature: torch.Tensor, distribute_idx: torch.Tensor) ->", "y is not None: y_t = torch.transpose(y, 0, 1) y_norm", "xyz.is_contiguous() assert new_xyz.is_contiguous() b, n, _ = xyz.size() m =", "None: grouped_features = grouping(features, idx) if self.use_xyz: new_features = torch.cat([grouped_xyz,", "3) three nearest neighbors of the target features in features", "new_xyz: ignored torch features: (b, c, n) descriptors of the", "Performs weight linear interpolation on 3 features input: features: (b,", "pointops_cuda.featuredistribute_cuda(b, n, m, max_xyz, xyz, 
distribute_idx) return distribute_idx @staticmethod def", "idxs[:, 0:nsample] ''' # ''' # new_xyz_repeat = new_xyz.repeat(1, 1,", "idx = torch.cuda.IntTensor(b, m, nsample).zero_() pointops_cuda.labelstat_and_ballquery_cuda(b, n, m, radius, nsample,", "a=None, b=None): return None, None nearestneighbor = NearestNeighbor.apply class Interpolation(Function):", "return idx @staticmethod def backward(ctx): return None, None, None knnquery_naive", "n, 3) :param xyz: (b, m, 3) :return: distribute_idx: (b,", "b, n, _ = max_xyz.size() m = xyz.size(1) distribute_idx =", "xyz: (b, n, 3) :param new_xyz: (b, m, 3) :param", "n, nclass = label_stat.size() m = idx.size(1) new_label_stat = torch.cuda.IntTensor(b,", "index of 3 nearest neighbors \"\"\" assert unknown.is_contiguous() assert known.is_contiguous()", "def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor, features: torch.Tensor = None)", "m = idx.size(1) output = torch.cuda.FloatTensor(b, c, m) pointops_cuda.gathering_forward_cuda(b, c,", "(b, m, 3) centers of the ball query output: (b,", "m, nsample) \"\"\" assert features.is_contiguous() assert idx.is_contiguous() b, c, n", "new_xyz: (b, m, 3) centriods features: (b, c, n) idx:", "- xyz.repeat(1, m, 1).view(b, m * n, 3)).pow(2).sum(dim=2).view(b, m, n)", "m, nsample, nclass, label_stat, idx, new_label_stat) return new_label_stat @staticmethod def", "forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor = None, features: torch.Tensor =", "QueryAndGroup_Dilate(nn.Module): \"\"\" Groups with a ball query of radius parameters:", "class KNNQueryNaive(Function): @staticmethod def forward(ctx, nsample: int, xyz: torch.Tensor, new_xyz:", "= features.size() n = idx.size(1) ctx.interpolation_for_backward = (idx, weight, m)", ":param new_xyz: (b, m, 3) :param label_stat: (b, n, nclass)", "-> torch.Tensor: \"\"\" :param ctx: :param max_xyz: (b, n, 3)", "torch.Tensor: \"\"\" :param ctx: :param max_xyz: (b, n, 3) :param", "as np return torch.clamp(dist, 0.0, np.inf) class 
KNNQueryNaive(Function): @staticmethod def", "use_xyz=True): super(QueryAndGroup, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz", "= idxs[:, :, 0:nsample].int() # ''' return idx @staticmethod def", "torch.Tensor: def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor = None) ->", "int32, Maximum number of features to gather in the ball", "from typing import Tuple import torch from torch.autograd import Function", "None, None, None labelstat_and_ballquery = LabelStatAndBallQuery.apply def pairwise_distances(x, y=None): '''", "@staticmethod def backward(ctx, a=None, b=None): return None, None, None, None,", "m), None, None \"\"\" idx, weight, m = ctx.interpolation_for_backward b,", "n, m, grad_out_data, idx, weight, grad_features.data) return grad_features, None, None", "new_xyz.size(1) idx = torch.cuda.IntTensor(b, m, nsample).zero_() pointops_cuda.ballquery_cuda(b, n, m, radius,", "float, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor, label_stat: torch.Tensor): '''", "dist2: (b, m, nsample) ) \"\"\" if new_xyz is None:", "\"\"\" def __init__(self, use_xyz: bool = True): super(GroupAll, self).__init__() self.use_xyz", "distribute_idx) return distribute_idx @staticmethod def backward(ctx, a=None): return None, None", "radius: float, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor) -> torch.Tensor:", "grouped_features = grouping(features, idx) if self.use_xyz: new_features = torch.cat([grouped_xyz, grouped_features],", "new_xyz.size(1) new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() idx = torch.cuda.IntTensor(b, m,", "None: if self.radius is not None: idx = ballquery(self.radius, self.nsample,", "= new_xyz.size() n = xyz.size(1) ''' idx = torch.zeros(b, m,", "self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz #def forward(self,", "not None: idx = ballquery(self.radius, self.nsample, xyz, new_xyz) else: idx", "# grouped_idxs = grouping(idxs.unsqueeze(1).float(), 
idx).squeeze(1).int() # (b, m, nsample) #grouped_xyz", "idx : (b, m) tensor output: (b, c, m) \"\"\"", "(b, n, 3) coordinates of the features new_xyz: ignored torch", "(y ** 2).sum(1).view(1, -1) else: y_t = torch.transpose(x, 0, 1)", "(b, m, nsample) idx = knnquery(self.nsample, xyz, new_xyz) # (b,", "torch.cuda.FloatTensor(b, c, m) pointops_cuda.gathering_forward_cuda(b, c, n, m, features, idx, output)", "given then use 'y=x'. i.e. dist[i,j] = ||x[i,:]-y[j,:]||^2 ''' x_norm", "True): super(GroupAll, self).__init__() self.use_xyz = use_xyz def forward(self, xyz: torch.Tensor,", "idx = torch.zeros(b, m, nsample).int().cuda() for i in range(b): dist", "m, nsample) else: new_features = grouped_features else: assert self.use_xyz, \"Cannot", "nsample = idx.size() output = torch.cuda.LongTensor(b, c, m, nsample) pointops_cuda.grouping_int_forward_cuda(b,", "BallQuery.apply class FeatureDistribute(Function): @staticmethod def forward(ctx, max_xyz: torch.Tensor, xyz: torch.Tensor)", "features input: features: (b, c, m) features descriptors to be", "xyz.size(1) idx = torch.cuda.IntTensor(b, m, nsample).zero_() dist2 = torch.cuda.FloatTensor(b, m,", "the ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup, self).__init__()", "nsample, nclass, label_stat, idx, new_label_stat) return new_label_stat @staticmethod def backward(ctx,", "label_stat.size() m = new_xyz.size(1) new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() idx", "import Tuple import torch from torch.autograd import Function import torch.nn", "features in features weight: (b, n, 3) weights output: (b,", "c, n) return output @staticmethod def backward(ctx, grad_out): idx, c,", "n), idx : (b, m, nsample) containing the indicies of", "idxs] = torch.sort(dist, dim=2) idx = idxs[:, :, 1:nsample+1].int() #", "use_xyz=True): super(Le_QueryAndGroup_OnlyFeature, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz", "knnquery(2*self.nsample, 
xyz, new_xyz) # (b, m, nsample) idx2 = np.array([i", "nsample, xyz, new_xyz, idx, dist2) return idx @staticmethod def backward(ctx,", "b, n, _ = xyz.size() m = new_xyz.size(1) idx =", "return output @staticmethod def backward(ctx, grad_out: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:", "pointops_cuda.furthestsampling_cuda(b, n, m, xyz, temp, idx) return idx @staticmethod def", "torch.cuda.FloatTensor(b, c, m, nsample) pointops_cuda.grouping_forward_cuda(b, c, n, m, nsample, features,", "new_xyz.is_contiguous() b, m, _ = new_xyz.size() n = xyz.size(1) idx", "#if idx is None: if self.radius is not None: idx", "torch.Tensor: \"\"\" Performs weight linear interpolation on 3 features input:", "self.use_xyz = radius, nsample, use_xyz #def forward(self, xyz: torch.Tensor, new_xyz:", "temp = torch.cuda.FloatTensor(b, n).fill_(1e10) pointops_cuda.furthestsampling_cuda(b, n, m, xyz, temp, idx)", "idx.size(1) output = torch.cuda.FloatTensor(b, c, m) pointops_cuda.gathering_forward_cuda(b, c, n, m,", "y[j,:] if y is not given then use 'y=x'. 
i.e.", "= torch.cuda.FloatTensor(b, c, m).zero_() pointops_cuda.featuregather_forward_cuda(b, n, m, c, max_feature, distribute_idx,", ":param grad_distribute_feature: (b, c, m) :return: grad_max_feature: (b, c, n),", "idx[i, :, :] = idxs[:, 0:nsample] ''' # ''' #", "torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: \"\"\" input: grad_out: (b, c,", "xyz b, m, _ = new_xyz.size() n = xyz.size(1) '''", "ballquery(self.radius, self.nsample, xyz, new_xyz) else: # idx = knnquery_naive(self.nsample, xyz,", "super(Le_QueryAndGroup_SameSize, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz def", "features, idx, weight, output) return output @staticmethod def backward(ctx, grad_out:", "m = distribute_idx.size(1) distribute_feature = torch.cuda.FloatTensor(b, c, m).zero_() pointops_cuda.featuregather_forward_cuda(b, n,", "a=None): return None, None, None, None labelstat_ballrange = LabelStatBallRange.apply class", "output: (b, c, n) tensor of the interpolated features \"\"\"", "grad_distribute_feature_data, distribute_idx, grad_max_feature.data) return grad_max_feature, None featuregather = FeatureGather.apply class", "None, None, None knnquery_exclude = KNNQueryExclude.apply class Le_QueryAndGroup_SameSize(nn.Module): \"\"\" Groups", "idx2] xyz_trans = xyz.transpose(1, 2).contiguous() grouped_xyz = grouping(xyz_trans, idx) #", "n, features, idx, weight, output) return output @staticmethod def backward(ctx,", "c, n, m, grad_out_data, idx, grad_features.data) return grad_features, None gathering", ": (b, m, nsample) containing the indicies of features to", "forward(ctx, max_xyz: torch.Tensor, xyz: torch.Tensor) -> torch.Tensor: \"\"\" :param ctx:", "Output: dist is a NxM matrix where dist[i,j] is the", "@staticmethod def forward(ctx, radius: float, nsample: int, xyz: torch.Tensor, new_xyz:", "self.use_xyz: new_features = torch.cat([grouped_xyz, grouped_features], dim=1) # (b, c+3, 1,", "torch.sqrt(dist2), idx @staticmethod def 
backward(ctx, a=None, b=None): return None, None", "forward(ctx, radius: float, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor) ->", "is a Nxd matrix y is an optional Mxd matirx", "is None: new_xyz = xyz #if idx is None: if", "None, None, None, None, None labelstat_and_ballquery = LabelStatAndBallQuery.apply def pairwise_distances(x,", "nclass = label_stat.size() m = new_xyz.size(1) new_label_stat = torch.cuda.IntTensor(b, m,", "in features weight: (b, n, 3) weights output: (b, c,", "le new_features = grouped_features # (b, c, m, nsample) else:", "nsample, use_xyz def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor = None,", "input: grad_out: (b, c, n) output: grad_features: (b, c, m),", "= Gathering.apply class NearestNeighbor(Function): @staticmethod def forward(ctx, unknown: torch.Tensor, known:", "= torch.transpose(y, 0, 1) y_norm = (y ** 2).sum(1).view(1, -1)", "torch.cuda.IntTensor(b, m, nsample).zero_() dist2 = torch.cuda.FloatTensor(b, m, nsample).zero_() pointops_cuda.knnquery_cuda(b, n,", "unknown: torch.Tensor, known: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: \"\"\" Find the", "the features new_xyz: (b, m, 3) centriods output: new_features: (b,", "n, nclass) :return: new_label_stat: (b, m, nclass) ''' assert xyz.is_contiguous()", "knnquery_naive(self.nsample, xyz, new_xyz) # (b, m, nsample) idx = knnquery(self.nsample,", "idx) # (b, 3, m, nsample) # grouped_idxs = grouping(idxs.unsqueeze(1).float(),", "m) \"\"\" assert xyz.is_contiguous() b, n, _ = xyz.size() idx", "def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup_OnlyFeature, self).__init__() self.radius, self.nsample, self.use_xyz", "\"\"\" input: grad_out: (b, c, m, nsample) output: (b, c,", "= knnquery(2*self.nsample, xyz, new_xyz) # (b, m, nsample) idx2 =", "nsample) \"\"\" if new_xyz is None: new_xyz = xyz if", "descriptors to be interpolated from idx: (b, n, 3) three", "label_stat.is_contiguous() assert idx.is_contiguous() b, n, nclass = 
label_stat.size() m =", "= torch.cuda.IntTensor(b, m, nclass).zero_() pointops_cuda.labelstat_idx_cuda(b, n, m, nsample, nclass, label_stat,", "float, xyz: torch.Tensor, new_xyz: torch.Tensor, label_stat: torch.Tensor) -> torch.Tensor: '''", "assert xyz.is_contiguous() assert new_xyz.is_contiguous() assert label_stat.is_contiguous() b, n, nclass =", "- xyz_repeat).pow(2).sum(dim=2).view(b, m, n) dist = (new_xyz.repeat(1, 1, n).view(b, m", "knnquery_naive(self.nsample, xyz, new_xyz) # (b, m, nsample) idx = knnquery(2*self.nsample,", "is not None: grouped_features = features.unsqueeze(2) if self.use_xyz: new_features =", "def backward(ctx, a=None): return None, None, None knnquery = KNNQuery.apply", "distribute_feature) ctx.for_backwards = (distribute_idx, n) return distribute_feature @staticmethod def backward(ctx,", "new_xyz.size(1) new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() pointops_cuda.labelstat_ballrange_cuda(b, n, m, radius,", "centers of the ball query output: (b, m, nsample) tensor", "xyz: torch.Tensor) -> torch.Tensor: \"\"\" :param ctx: :param max_xyz: (b,", "3) xyz coordinates of the features new_xyz: torch.Tensor, (b, m,", "# grouped_idxs = grouping(idxs.unsqueeze(1).float(), idx).squeeze(1).int() # (b, m, nsample) grouped_xyz", "forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor = None) -> torch.Tensor: \"\"\"", "as a feature!\" new_features = grouped_xyz return grouped_xyz, new_features class", "(b, c, m) :return: grad_max_feature: (b, c, n), None '''", "grouped_idxs: (b, m, nsample) \"\"\" assert xyz.size() == new_xyz.size() if", "= xyz.size() idx = torch.cuda.IntTensor(b, m) temp = torch.cuda.FloatTensor(b, n).fill_(1e10)", "c, m) features descriptors to be interpolated from idx: (b,", "xyz coordinates of the features new_xyz: torch.Tensor, (b, m, 3)", "m, nsample) ) \"\"\" if new_xyz is None: new_xyz =", "in the ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup,", "@staticmethod def 
forward(ctx, features, idx): \"\"\" input: features: (b, c,", "range(b): dist = pairwise_distances(new_xyz[i, :, :], xyz[i, :, :]) [_,", "xyz, new_xyz) # (b, m, nsample) #xyz_trans = xyz.transpose(1, 2).contiguous()", "c, n) output: grad_features: (b, c, m), None, None \"\"\"", "self.use_xyz: new_features = torch.cat([grouped_xyz, grouped_features], dim=1) # (b, c+3, m,", "idx : (b, m, nsample) containing the indicies of features", "(b, c, n), idx : (b, m, nsample) containing the", "features: (b, c, n), idx : (b, m, nsample) containing", "(b, m, nsample) xyz_trans = xyz.transpose(1, 2).contiguous() # BxNx3 ->", "n, nclass) :param idx: (b, m, nsample) :return: new_label_stat: (b,", "nsample) if self.use_xyz: #new_features = torch.cat([grouped_xyz, grouped_features], dim=1) # (b,", "grouped_features = features.unsqueeze(2) if self.use_xyz: new_features = torch.cat([grouped_xyz, grouped_features], dim=1)", "ctx: :param grad_distribute_feature: (b, c, m) :return: grad_max_feature: (b, c,", "def forward(ctx, radius: float, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor,", "GroupAll(nn.Module): \"\"\" Groups all features \"\"\" def __init__(self, use_xyz: bool", "the features new_xyz: torch.Tensor, (b, m, 3) centers of the", "the indicies of the features that form the query balls", "= xyz.transpose(1, 2).unsqueeze(2) if features is not None: grouped_features =", "c, n) idx: idx of neighbors # idxs: (b, n)", "is an optional Mxd matirx Output: dist is a NxM", "None labelstat_idx = LabelStatIdx.apply class LabelStatAndBallQuery(Function): @staticmethod def forward(ctx, radius:", "new_xyz) else: # idx = knnquery_naive(self.nsample, xyz, new_xyz) # (b,", "parameters: radius: float32, Radius of ball nsample: int32, Maximum number", "3) pointops_cuda.nearestneighbor_cuda(b, n, m, unknown, known, dist2, idx) return torch.sqrt(dist2),", "torch.Tensor, new_xyz: torch.Tensor = None) -> torch.Tensor: \"\"\" input: xyz:", "= FeatureDistribute.apply class 
FeatureGather(Function): @staticmethod def forward(ctx, max_feature: torch.Tensor, distribute_idx:", "grad_max_feature: (b, c, n), None ''' distribute_idx, n = ctx.for_backwards", "distribute_feature = torch.cuda.FloatTensor(b, c, m).zero_() pointops_cuda.featuregather_forward_cuda(b, n, m, c, max_feature,", "1:nsample+1].int() # ''' return idx @staticmethod def backward(ctx): return None,", "new_label_stat: (b, m, nclass) idx: (b, m, nsample) ''' assert", "m, xyz, temp, idx) return idx @staticmethod def backward(xyz, a=None):", "nearestneighbor = NearestNeighbor.apply class Interpolation(Function): @staticmethod def forward(ctx, features: torch.Tensor,", "m = xyz.size(1) distribute_idx = torch.cuda.IntTensor(b, m).zero_() pointops_cuda.featuredistribute_cuda(b, n, m,", "weight.is_contiguous() b, c, m = features.size() n = idx.size(1) ctx.interpolation_for_backward", "m, nsample) idx = knnquery(self.nsample, xyz, new_xyz) # (b, m,", "KNNQueryExclude(Function): @staticmethod def forward(ctx, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor", "@staticmethod def forward(ctx, nsample: int, label_stat: torch.Tensor, idx: torch.Tensor) ->", "new_xyz: torch.Tensor = None) -> Tuple[torch.Tensor]: \"\"\" KNN Indexing input:", "nsample: :param xyz: (b, n, 3) :param new_xyz: (b, m,", "nsample) # le new_features = grouped_features # (b, c, m,", "new_xyz: (b, m, 3) centriods output: new_features: (b, m, nsample)", "output: new_features: (b, m, nsample) \"\"\" if new_xyz is None:", "assert features.is_contiguous() assert idx.is_contiguous() b, c, n = features.size() _,", "None, None, None labelstat_ballrange = LabelStatBallRange.apply class LabelStatIdx(Function): @staticmethod def", "(b, n, nclass) :return: new_label_stat: (b, m, nclass) idx: (b,", "nsample: int32, Number of neighbor xyz: (b, n, 3) coordinates", "-> Tuple[torch.Tensor, torch.Tensor]: \"\"\" Find the three nearest neighbors of", "backward(xyz, a=None): return None, None furthestsampling = 
FurthestSampling.apply class Gathering(Function):", "def backward(ctx, a=None): return None, None, None labelstat_idx = LabelStatIdx.apply", "m, nclass) idx: (b, m, nsample) ''' assert xyz.is_contiguous() assert", "assert unknown.is_contiguous() assert known.is_contiguous() b, n, _ = unknown.size() m", "= None, features: torch.Tensor = None, idx: torch.Tensor = None)", "idx = ballquery(self.radius, self.nsample, xyz, new_xyz) else: # idx =", "= None) -> Tuple[torch.Tensor]: \"\"\" input: xyz: (b, n, 3)", "return None, None, None, None ballquery = BallQuery.apply class FeatureDistribute(Function):", "in the ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup_SameSize,", "c, m).zero_() pointops_cuda.featuregather_forward_cuda(b, n, m, c, max_feature, distribute_idx, distribute_feature) ctx.for_backwards", "n, m, radius, nsample, nclass, new_xyz, xyz, label_stat, idx, new_label_stat)", "= KNNQuery.apply class KNNQueryExclude(Function): @staticmethod def forward(ctx, nsample: int, xyz:", "assert distribute_idx.is_contiguous() b, c, n = max_feature.size() m = distribute_idx.size(1)", "coordinates of the features new_xyz: (b, m, 3) centriods output:", "ctx: :param max_feature: (b, c, n) :param distribute_idx: (b, m)", "feature!\" new_features = grouped_xyz return new_features class Le_QueryAndGroup(nn.Module): \"\"\" Groups", "FurthestSampling.apply class Gathering(Function): @staticmethod def forward(ctx, features, idx): \"\"\" input:", "idx.is_contiguous() b, c, n = features.size() _, m, nsample =", "c, n = features.size() m = idx.size(1) output = torch.cuda.FloatTensor(b,", "ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(QueryAndGroup_Dilate, self).__init__() self.radius,", "n, _ = max_xyz.size() m = xyz.size(1) distribute_idx = torch.cuda.IntTensor(b,", "of radius parameters: radius: float32, Radius of ball nsample: int32,", "idx = ballquery(self.radius, 2*self.nsample, xyz, new_xyz) else: # idx 
=", "matrix y is an optional Mxd matirx Output: dist is", "None, None knnquery = KNNQuery.apply class KNNQueryExclude(Function): @staticmethod def forward(ctx,", "new_xyz) # (b, m, nsample) idx = knnquery(self.nsample, xyz, new_xyz)", "N) tensor \"\"\" grouped_xyz = xyz.transpose(1, 2).unsqueeze(2) if features is", "in the ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Gen_QueryAndGroupXYZ,", "= torch.cuda.IntTensor(b, m, nsample).zero_() pointops_cuda.ballquery_cuda(b, n, m, radius, nsample, new_xyz,", "m) pointops_cuda.gathering_forward_cuda(b, c, n, m, features, idx, output) ctx.for_backwards =", "= torch.cuda.FloatTensor(b, n).fill_(1e10) pointops_cuda.furthestsampling_cuda(b, n, m, xyz, temp, idx) return", "dist2, idx) return torch.sqrt(dist2), idx @staticmethod def backward(ctx, a=None, b=None):", ":return: new_label_stat: (b, m, nclass) idx: (b, m, nsample) '''", "[_, idxs] = torch.sort(dist, dim=1) idx[i, :, :] = idxs[:,", "the target features in features weight: (b, n, 3) weights", "(b, n, 3) :param new_xyz: (b, m, 3) :param label_stat:", "= torch.cuda.FloatTensor(b, c, n).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.gathering_backward_cuda(b, c, n,", ":, :], xyz[i, :, :]) [_, idxs] = torch.sort(dist, dim=1)", "m, 1).view(b, m * n, 3) # dist = (new_xyz_repeat", "labelstat_idx = LabelStatIdx.apply class LabelStatAndBallQuery(Function): @staticmethod def forward(ctx, radius: float,", "def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor = None, features: torch.Tensor", "new_xyz.size() if new_xyz is None: new_xyz = xyz if idx", "b=None): return None, None, None, None, None labelstat_and_ballquery = LabelStatAndBallQuery.apply", "n, 3) - xyz.repeat(1, m, 1).view(b, m * n, 3)).pow(2).sum(dim=2).view(b,", "(b, c, n), None ''' distribute_idx, n = ctx.for_backwards b,", "def backward(ctx, a=None): return None, None, None, None labelstat_ballrange =", "(b, n, 3) coordinates of the features new_xyz: (b, n,", 
"ballquery = BallQuery.apply class FeatureDistribute(Function): @staticmethod def forward(ctx, max_xyz: torch.Tensor,", "c+3, 1, N) tensor \"\"\" grouped_xyz = xyz.transpose(1, 2).unsqueeze(2) if", "grad_out.data.contiguous() pointops_cuda.interpolation_backward_cuda(b, c, n, m, grad_out_data, idx, weight, grad_features.data) return", "a feature!\" new_features = grouped_xyz return new_features class QueryAndGroup_Dilate(nn.Module): \"\"\"", "torch features: (b, c, n) descriptors of the features output:", "nsample) return grouped_xyz class Le_QueryAndGroup_OnlyFeature(nn.Module): \"\"\" Groups with a ball", "np.inf) class KNNQueryNaive(Function): @staticmethod def forward(ctx, nsample: int, xyz: torch.Tensor,", "(b, c, m, nsample) \"\"\" assert features.is_contiguous() assert idx.is_contiguous() b,", "b, m = idx.size() grad_features = torch.cuda.FloatTensor(b, c, n).zero_() grad_out_data", "* n, 3) - xyz.repeat(1, m, 1).view(b, m * n,", "xyz.is_contiguous() assert new_xyz.is_contiguous() b, m, _ = new_xyz.size() n =", "known: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: \"\"\" Find the three nearest", "balls \"\"\" assert xyz.is_contiguous() assert new_xyz.is_contiguous() b, n, _ =", "ball nsample: int32, Maximum number of features to gather in", "= idx.size() output = torch.cuda.LongTensor(b, c, m, nsample) pointops_cuda.grouping_int_forward_cuda(b, c,", "backward(ctx, a=None): return None, None, None knnquery = KNNQuery.apply class", "nsample) xyz_trans = xyz.transpose(1, 2).contiguous() grouped_xyz = grouping(xyz_trans, idx) #", "max_xyz, xyz, distribute_idx) return distribute_idx @staticmethod def backward(ctx, a=None): return", "(b, m, nsample) idx2 = np.array([i for i in range(2*self.nsample)])", "xyz #if idx is None: if self.radius is not None:", "assert idx.is_contiguous() assert weight.is_contiguous() b, c, m = features.size() n", "is None: new_xyz = xyz if idx is None: if", "of the features output: new_features: (b, c+3, 1, N) tensor", "ball query 
output: (b, m, nsample) tensor with the indicies", "features.size() _, m, nsample = idx.size() output = torch.cuda.LongTensor(b, c,", "xyz[i, :, :]) [_, idxs] = torch.sort(dist, dim=1) idx[i, :,", "new_features: (b, c+3, m, nsample) # grouped_idxs: (b, m, nsample)", "m, 3) centriods output: idx: (b, m, nsample) ( dist2:", "n, 3) :param new_xyz: (b, m, 3) :param label_stat: (b,", "not use xyz as a feature!\" new_features = grouped_xyz return", "_, m, nsample = idx.size() output = torch.cuda.FloatTensor(b, c, m,", "the balls nsample: int, maximum number of features in the", "idx, n = ctx.for_backwards b, c, m, nsample = grad_out.size()", "None, None, None labelstat_idx = LabelStatIdx.apply class LabelStatAndBallQuery(Function): @staticmethod def", "# (b, 3, m, nsample) # grouped_idxs = grouping(idxs.unsqueeze(1).float(), idx).squeeze(1).int()", "idx @staticmethod def backward(ctx, a=None): return None, None, None, None", "def __init__(self, use_xyz: bool = True): super(GroupAll, self).__init__() self.use_xyz =", "n, 3) l2 distance to the three nearest neighbors idx:", "None, None, None, None labelstat_ballrange = LabelStatBallRange.apply class LabelStatIdx(Function): @staticmethod", "def backward(ctx, a=None): return None, None grouping_int = GroupingInt.apply class", "and y[j,:] if y is not given then use 'y=x'.", "assert features.is_contiguous() assert idx.is_contiguous() b, c, n = features.size() m", "nsample) ''' assert xyz.is_contiguous() assert new_xyz.is_contiguous() assert label_stat.is_contiguous() b, n,", "1, n).view(b, m * n, 3) - xyz.repeat(1, m, 1).view(b,", "= torch.cuda.IntTensor(b, m) temp = torch.cuda.FloatTensor(b, n).fill_(1e10) pointops_cuda.furthestsampling_cuda(b, n, m,", "-> Tuple[torch.Tensor, torch.Tensor]: \"\"\" input: grad_out: (b, c, m, nsample)", "def backward(ctx): return None, None, None knnquery_exclude = KNNQueryExclude.apply class", "2.0 * torch.mm(x, y_t) import numpy as np return torch.clamp(dist,", "xyz_repeat = xyz.repeat(1, m, 
1).view(b, m * n, 3) #", "with output: (b, c, m, nsample) \"\"\" assert features.is_contiguous() assert", "nsample).zero_() pointops_cuda.ballquery_cuda(b, n, m, radius, nsample, new_xyz, xyz, idx) return", "c, n = features.size() _, m, nsample = idx.size() output", "return grad_features, None, None interpolation = Interpolation.apply class Grouping(Function): @staticmethod", "m, nsample) xyz_trans = xyz.transpose(1, 2).contiguous() # BxNx3 -> Bx3xN", "xyz.is_contiguous() b, n, _ = max_xyz.size() m = xyz.size(1) distribute_idx", "= grouped_xyz return new_features class GroupAll(nn.Module): \"\"\" Groups all features", "between x[i,:] and y[j,:] if y is not given then", "new_xyz: torch.Tensor, (b, m, 3) centers of the ball query", "None: new_xyz = xyz b, m, _ = new_xyz.size() n", "a Nxd matrix y is an optional Mxd matirx Output:", "class Le_QueryAndGroup_OnlyFeature(nn.Module): \"\"\" Groups with a ball query of radius", "__init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup_OnlyFeature, self).__init__() self.radius, self.nsample, self.use_xyz =", "_ = unknown.size() m = known.size(1) dist2 = torch.cuda.FloatTensor(b, n,", "forward(ctx, xyz, m): \"\"\" input: xyz: (b, n, 3) and", "class KNNQueryExclude(Function): @staticmethod def forward(ctx, nsample: int, xyz: torch.Tensor, new_xyz:", "grouping(features, idx) # (b, c, m, nsample) if self.use_xyz: #new_features", "labelstat_ballrange = LabelStatBallRange.apply class LabelStatIdx(Function): @staticmethod def forward(ctx, nsample: int,", "features, idx, output) return output @staticmethod def backward(ctx, a=None): return", "a=None, b=None): return None, None, None, None, None labelstat_and_ballquery =", "pointops_cuda.featuregather_forward_cuda(b, n, m, c, max_feature, distribute_idx, distribute_feature) ctx.for_backwards = (distribute_idx,", "m = features.size() n = idx.size(1) ctx.interpolation_for_backward = (idx, weight,", "to be interpolated from idx: (b, n, 3) three nearest", 
"coordinates of the features new_xyz: (b, n, 3) centriods features:", "self.nsample, xyz, new_xyz) else: idx = knnquery(self.nsample, xyz, new_xyz) #", "\"\"\" Groups all features \"\"\" def __init__(self, use_xyz: bool =", "c, n) :param distribute_idx: (b, m) :return: distribute_feature: (b, c,", "n, 3) coordinates of the features new_xyz: (b, n, 3)", "None, None nearestneighbor = NearestNeighbor.apply class Interpolation(Function): @staticmethod def forward(ctx,", ":param max_xyz: (b, n, 3) :param xyz: (b, m, 3)", "features to group with output: (b, c, m, nsample) \"\"\"", "a=None): return None, None, None labelstat_idx = LabelStatIdx.apply class LabelStatAndBallQuery(Function):", "c+3, 1, n) else: new_features = grouped_features else: new_features =", "m, n) dist = (new_xyz.repeat(1, 1, n).view(b, m * n,", "m = grad_distribute_feature.size() grad_max_feature = torch.cuda.FloatTensor(b, c, n).zero_() grad_distribute_feature_data =", "xyz: torch.Tensor, (b, n, 3) xyz coordinates of the features", "n) tensor of the interpolated features \"\"\" assert features.is_contiguous() assert", "FeatureDistribute(Function): @staticmethod def forward(ctx, max_xyz: torch.Tensor, xyz: torch.Tensor) -> torch.Tensor:", "@staticmethod def forward(ctx, features: torch.Tensor, idx: torch.Tensor) -> torch.Tensor: \"\"\"", "return grouped_xyz, new_features class Gen_QueryAndGroupXYZ(nn.Module): \"\"\" Groups with a ball", "idx = idxs[:, :, 1:nsample+1].int() # ''' return idx @staticmethod", "xyz.size() idx = torch.cuda.IntTensor(b, m) temp = torch.cuda.FloatTensor(b, n).fill_(1e10) pointops_cuda.furthestsampling_cuda(b,", "of features to group with output: (b, c, m, nsample)", "neighbors \"\"\" assert unknown.is_contiguous() assert known.is_contiguous() b, n, _ =", "output: (b, c, m, nsample) \"\"\" assert features.is_contiguous() assert idx.is_contiguous()", "nsample) \"\"\" assert features.is_contiguous() assert idx.is_contiguous() b, c, n =", "nsample).zero_() dist2 = 
torch.cuda.FloatTensor(b, m, nsample).zero_() pointops_cuda.knnquery_cuda(b, n, m, nsample,", "m, nsample = idx.size() output = torch.cuda.LongTensor(b, c, m, nsample)", "new_features class GroupAll(nn.Module): \"\"\" Groups all features \"\"\" def __init__(self,", "n), idx : (b, m) tensor output: (b, c, m)", "= torch.cuda.IntTensor(b, m, nsample).zero_() dist2 = torch.cuda.FloatTensor(b, m, nsample).zero_() pointops_cuda.knnquery_cuda(b,", "m, nsample) #xyz_trans = xyz.transpose(1, 2).contiguous() #grouped_xyz = grouping(xyz_trans, idx)", "the three nearest neighbors of unknown in known input: unknown:", "@staticmethod def backward(ctx, a=None): return None, None featuredistribute = FeatureDistribute.apply", "np.array([i for i in range(2*self.nsample)]) np.random.shuffle(idx2) idx2 = idx2[:self.nsample] idx", "= xyz.size(1) idx = torch.cuda.IntTensor(b, m, nsample).zero_() dist2 = torch.cuda.FloatTensor(b,", "xyz.size(1) ''' idx = torch.zeros(b, m, nsample).int().cuda() for i in", "m * n, 3) # xyz_repeat = xyz.repeat(1, m, 1).view(b,", "nclass) ''' assert xyz.is_contiguous() assert new_xyz.is_contiguous() assert label_stat.is_contiguous() b, n,", "grad_distribute_feature: torch.Tensor): ''' :param ctx: :param grad_distribute_feature: (b, c, m)", "''' assert max_feature.is_contiguous() assert distribute_idx.is_contiguous() b, c, n = max_feature.size()", "n) return distribute_feature @staticmethod def backward(ctx, grad_distribute_feature: torch.Tensor): ''' :param", "i in range(b): dist = pairwise_distances(new_xyz[i, :, :], xyz[i, :,", "(b, c, m, nsample) if self.use_xyz: #new_features = torch.cat([grouped_xyz, grouped_features],", "assert label_stat.is_contiguous() b, n, nclass = label_stat.size() m = new_xyz.size(1)", "# grouped_idxs: (b, m, nsample) \"\"\" if new_xyz is None:", "label_stat.size() m = idx.size(1) new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() pointops_cuda.labelstat_idx_cuda(b,", "(b, n, 3) coordinates of the features new_xyz: (b, 
m,", "xyz, new_xyz) # (b, m, nsample) xyz_trans = xyz.transpose(1, 2).contiguous()", "= torch.cuda.LongTensor(b, c, m, nsample) pointops_cuda.grouping_int_forward_cuda(b, c, n, m, nsample,", "Function import torch.nn as nn from metrics.pointops import pointops_cuda import", "new_xyz) # (b, m, nsample) #xyz_trans = xyz.transpose(1, 2).contiguous() #grouped_xyz", "import torch.nn as nn from metrics.pointops import pointops_cuda import numpy", "output) ctx.for_backwards = (idx, n) return output @staticmethod def backward(ctx,", "l2 distance to the three nearest neighbors idx: (b, n,", "n, nclass) :return: new_label_stat: (b, m, nclass) idx: (b, m,", "(b, m, 3) centriods features: (b, c, n) idx: idx", "int, xyz: torch.Tensor, new_xyz: torch.Tensor = None) -> Tuple[torch.Tensor]: \"\"\"", "= None, idx: torch.Tensor = None) -> torch.Tensor: \"\"\" input:", "idx.size(1) ctx.interpolation_for_backward = (idx, weight, m) output = torch.cuda.FloatTensor(b, c,", "= idx.size(1) ctx.interpolation_for_backward = (idx, weight, m) output = torch.cuda.FloatTensor(b,", "if new_xyz is None: new_xyz = xyz b, m, _", "label_stat.is_contiguous() b, n, nclass = label_stat.size() m = new_xyz.size(1) new_label_stat", "xyz, idx) return idx @staticmethod def backward(ctx, a=None): return None,", "ctx.for_backwards b, m = idx.size() grad_features = torch.cuda.FloatTensor(b, c, n).zero_()", "m).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.interpolation_backward_cuda(b, c, n, m, grad_out_data, idx,", "if features is not None: grouped_features = grouping(features, idx) #", "n) pointops_cuda.interpolation_forward_cuda(b, c, m, n, features, idx, weight, output) return", "max_feature: (b, c, n) :param distribute_idx: (b, m) :return: distribute_feature:", "b, c, m = features.size() n = idx.size(1) ctx.interpolation_for_backward =", "in the balls xyz: torch.Tensor, (b, n, 3) xyz coordinates", "\"\"\" assert features.is_contiguous() assert idx.is_contiguous() b, c, n = 
features.size()", "-> Bx3xN grouped_xyz = grouping(xyz_trans, idx) # (b, 3, m,", "y_norm - 2.0 * torch.mm(x, y_t) import numpy as np", "class FurthestSampling(Function): @staticmethod def forward(ctx, xyz, m): \"\"\" input: xyz:", "2).contiguous() # BxNx3 -> Bx3xN grouped_xyz = grouping(xyz_trans, idx) #", "features \"\"\" assert features.is_contiguous() assert idx.is_contiguous() assert weight.is_contiguous() b, c,", ":param max_feature: (b, c, n) :param distribute_idx: (b, m) :return:", "= KNNQueryExclude.apply class Le_QueryAndGroup_SameSize(nn.Module): \"\"\" Groups with a ball query", "# (b, m, nsample) #grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1) if features", "nsample = grad_out.size() grad_features = torch.cuda.FloatTensor(b, c, n).zero_() grad_out_data =", "(b, m, nclass) ''' assert xyz.is_contiguous() assert new_xyz.is_contiguous() assert label_stat.is_contiguous()", ":, :] = idxs[:, 0:nsample] ''' # ''' # new_xyz_repeat", "input: features: (b, c, m) features descriptors to be interpolated", "(b, n, 3) centriods features: (b, c, n) idx: idx", "Maximum number of features to gather in the ball \"\"\"", "def forward(ctx, features, idx): \"\"\" input: features: (b, c, n),", "def forward(ctx, features: torch.Tensor, idx: torch.Tensor, weight: torch.Tensor) -> torch.Tensor:", "grad_features.data) return grad_features, None gathering = Gathering.apply class NearestNeighbor(Function): @staticmethod", "from metrics.pointops import pointops_cuda import numpy as np class FurthestSampling(Function):", "y_norm = x_norm.view(1, -1) dist = x_norm + y_norm -", ") \"\"\" if new_xyz is None: new_xyz = xyz assert", "(x ** 2).sum(1).view(-1, 1) if y is not None: y_t", "xyz.size(1) distribute_idx = torch.cuda.IntTensor(b, m).zero_() pointops_cuda.featuredistribute_cuda(b, n, m, max_xyz, xyz,", "new_xyz: torch.Tensor) -> torch.Tensor: \"\"\" input: radius: float, radius of", "def backward(ctx, a=None): return None, None featuredistribute = FeatureDistribute.apply 
class", "xyz: torch.Tensor, new_xyz: torch.Tensor, label_stat: torch.Tensor) -> torch.Tensor: ''' :param", "m, nsample).zero_() pointops_cuda.labelstat_and_ballquery_cuda(b, n, m, radius, nsample, nclass, new_xyz, xyz,", "\"\"\" assert xyz.size() == new_xyz.size() if new_xyz is None: new_xyz", "features \"\"\" def __init__(self, use_xyz: bool = True): super(GroupAll, self).__init__()", "radius=None, nsample=32, use_xyz=True): super(Gen_QueryAndGroupXYZ, self).__init__() self.radius, self.nsample, self.use_xyz = radius,", "* n, 3) # xyz_repeat = xyz.repeat(1, m, 1).view(b, m", "weight, output) return output @staticmethod def backward(ctx, grad_out: torch.Tensor) ->", "dist2) return idx @staticmethod def backward(ctx, a=None): return None, None,", "known: (b, m, 3) output: dist2: (b, n, 3) l2", "torch.cuda.FloatTensor(b, m, nsample).zero_() pointops_cuda.knnquery_cuda(b, n, m, nsample, xyz, new_xyz, idx,", "def forward(ctx, max_xyz: torch.Tensor, xyz: torch.Tensor) -> torch.Tensor: \"\"\" :param", "torch.Tensor) -> torch.Tensor: \"\"\" input: radius: float, radius of the", "m, _ = new_xyz.size() n = xyz.size(1) ''' idx =", "\"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup_OnlyFeature, self).__init__() self.radius, self.nsample,", "n = features.size() m = idx.size(1) output = torch.cuda.FloatTensor(b, c,", "interpolation on 3 features input: features: (b, c, m) features", "numpy as np return torch.clamp(dist, 0.0, np.inf) class KNNQueryNaive(Function): @staticmethod", "radius, nclass, new_xyz, xyz, label_stat, new_label_stat) return new_label_stat @staticmethod def", "y_t = torch.transpose(x, 0, 1) y_norm = x_norm.view(1, -1) dist", "(b, m, nsample) tensor with the indicies of the features", "(b, n, nclass) :param idx: (b, m, nsample) :return: new_label_stat:", "pointops_cuda.grouping_forward_cuda(b, c, n, m, nsample, features, idx, output) ctx.for_backwards =", "FeatureDistribute.apply class FeatureGather(Function): 
@staticmethod def forward(ctx, max_feature: torch.Tensor, distribute_idx: torch.Tensor)", "0.0, np.inf) class KNNQueryNaive(Function): @staticmethod def forward(ctx, nsample: int, xyz:", "grouped_features], dim=1) # (b, c+3, m, nsample) else: new_features =", "n, m, grad_out_data, idx, grad_features.data) return grad_features, None gathering =", "n, 3) index of 3 nearest neighbors \"\"\" assert unknown.is_contiguous()", "None labelstat_and_ballquery = LabelStatAndBallQuery.apply def pairwise_distances(x, y=None): ''' Input: x", "new_features = grouped_features # (b, c, m, nsample) else: new_features", "grouped_xyz, new_features class QueryAndGroup(nn.Module): \"\"\" Groups with a ball query", "torch.transpose(y, 0, 1) y_norm = (y ** 2).sum(1).view(1, -1) else:", "output: grad_features: (b, c, m), None, None \"\"\" idx, weight,", "output: idx: (b, m, nsample) ( dist2: (b, m, nsample)", "output: idx: (b, m, nsample) \"\"\" if new_xyz is None:", "(b, n, 3) index of 3 nearest neighbors \"\"\" assert", "class BallQuery(Function): @staticmethod def forward(ctx, radius: float, nsample: int, xyz:", "grad_features = torch.cuda.FloatTensor(b, c, n).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.grouping_backward_cuda(b, c,", "grad_distribute_feature: (b, c, m) :return: grad_max_feature: (b, c, n), None", "torch.Tensor = None) -> torch.Tensor: \"\"\" input: xyz: (b, n,", "dist = (new_xyz_repeat - xyz_repeat).pow(2).sum(dim=2).view(b, m, n) dist = (new_xyz.repeat(1,", "return idx @staticmethod def backward(ctx, a=None): return None, None, None,", "return grad_max_feature, None featuregather = FeatureGather.apply class LabelStatBallRange(Function): @staticmethod def", "backward(ctx, grad_out): idx, c, n = ctx.for_backwards b, m =", "grouping(xyz_trans, idx) # (b, 3, m, nsample) return grouped_xyz class", "form the query balls \"\"\" assert xyz.is_contiguous() assert new_xyz.is_contiguous() b,", "= idx[:, :, idx2] xyz_trans = xyz.transpose(1, 2).contiguous() 
grouped_xyz =", "if new_xyz is None: new_xyz = xyz assert xyz.is_contiguous() assert", "if y is not given then use 'y=x'. i.e. dist[i,j]", "unknown.is_contiguous() assert known.is_contiguous() b, n, _ = unknown.size() m =", "feature!\" new_features = grouped_xyz return new_features class QueryAndGroup_Dilate(nn.Module): \"\"\" Groups", ": (b, m) tensor output: (b, c, m) \"\"\" assert", "forward(ctx, unknown: torch.Tensor, known: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: \"\"\" Find", "+ y_norm - 2.0 * torch.mm(x, y_t) import numpy as", "pointops_cuda.grouping_int_forward_cuda(b, c, n, m, nsample, features, idx, output) return output", "nsample, features, idx, output) return output @staticmethod def backward(ctx, a=None):", "m, nsample, xyz, new_xyz, idx, dist2) return idx @staticmethod def", "assert known.is_contiguous() b, n, _ = unknown.size() m = known.size(1)", "Bx3xN grouped_xyz = grouping(xyz_trans, idx) # (b, 3, m, nsample)", "grad_out.data.contiguous() pointops_cuda.grouping_backward_cuda(b, c, n, m, nsample, grad_out_data, idx, grad_features.data) return", "idx, output) ctx.for_backwards = (idx, n) return output @staticmethod def", "\"\"\" input: radius: float, radius of the balls nsample: int,", "-> torch.Tensor: \"\"\" Performs weight linear interpolation on 3 features", "max_xyz.is_contiguous() assert xyz.is_contiguous() b, n, _ = max_xyz.size() m =", "pointops_cuda.nearestneighbor_cuda(b, n, m, unknown, known, dist2, idx) return torch.sqrt(dist2), idx", "1) if y is not None: y_t = torch.transpose(y, 0,", "class LabelStatBallRange(Function): @staticmethod def forward(ctx, radius: float, xyz: torch.Tensor, new_xyz:", "is not None: y_t = torch.transpose(y, 0, 1) y_norm =", "c+3, m, nsample) # grouped_idxs: (b, m, nsample) \"\"\" assert", "idx = idx[:, :, idx2] xyz_trans = xyz.transpose(1, 2).contiguous() grouped_xyz", "def backward(ctx): return None, None, None knnquery_naive = KNNQueryNaive.apply class", "n) [_, idxs] = torch.sort(dist, dim=2) 
idx = idxs[:, :,", "self.nsample, self.use_xyz = radius, nsample, use_xyz def forward(self, xyz: torch.Tensor,", "output @staticmethod def backward(ctx, grad_out: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: \"\"\"", "features descriptors to be interpolated from idx: (b, n, 3)", "m, nclass) ''' assert xyz.is_contiguous() assert new_xyz.is_contiguous() assert label_stat.is_contiguous() b,", "torch.Tensor, idx: torch.Tensor, weight: torch.Tensor) -> torch.Tensor: \"\"\" Performs weight", "idx.size() output = torch.cuda.FloatTensor(b, c, m, nsample) pointops_cuda.grouping_forward_cuda(b, c, n,", "class GroupingInt(Function): @staticmethod def forward(ctx, features: torch.Tensor, idx: torch.Tensor) ->", "nsample: int32, Maximum number of features to gather in the", "of features to gather in the ball \"\"\" def __init__(self,", "return output @staticmethod def backward(ctx, grad_out: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor,", ":param ctx: :param nsample: :param label_stat: (b, n, nclass) :param", "b, c, n = max_feature.size() m = distribute_idx.size(1) distribute_feature =", "new_features class Gen_QueryAndGroupXYZ(nn.Module): \"\"\" Groups with a ball query of", "label_stat, new_label_stat) return new_label_stat @staticmethod def backward(ctx, a=None): return None,", "i.e. 
dist[i,j] = ||x[i,:]-y[j,:]||^2 ''' x_norm = (x ** 2).sum(1).view(-1,", "-> Tuple[torch.Tensor]: \"\"\" KNN Indexing input: nsample: int32, Number of", "grad_out: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: \"\"\" input: grad_out: (b, c,", "target features in features weight: (b, n, 3) weights output:", "c, n), None \"\"\" idx, n = ctx.for_backwards b, c,", "3) idx = torch.cuda.IntTensor(b, n, 3) pointops_cuda.nearestneighbor_cuda(b, n, m, unknown,", "temp, idx) return idx @staticmethod def backward(xyz, a=None): return None,", "nsample=32, use_xyz=True): super(QueryAndGroup, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample,", "features.size() _, m, nsample = idx.size() output = torch.cuda.FloatTensor(b, c,", "c, m, nsample) pointops_cuda.grouping_int_forward_cuda(b, c, n, m, nsample, features, idx,", "new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() pointops_cuda.labelstat_ballrange_cuda(b, n, m, radius, nclass,", "n, m, xyz, temp, idx) return idx @staticmethod def backward(xyz,", "None, None \"\"\" idx, weight, m = ctx.interpolation_for_backward b, c,", "m, 3) centriods output: idx: (b, m, nsample) \"\"\" if", "# BxNx3 -> Bx3xN grouped_xyz = grouping(xyz_trans, idx) # (b,", "# le new_features = grouped_features # (b, c, m, nsample)", "''' x_norm = (x ** 2).sum(1).view(-1, 1) if y is", "radius: :param nsample: :param xyz: (b, n, 3) :param new_xyz:", "m) tensor output: (b, c, m) \"\"\" assert features.is_contiguous() assert", "Groups all features \"\"\" def __init__(self, use_xyz: bool = True):", "idx.size(1) new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() pointops_cuda.labelstat_idx_cuda(b, n, m, nsample,", "n).view(b, m * n, 3) - xyz.repeat(1, m, 1).view(b, m", "= grouped_features # (b, c, m, nsample) else: new_features =", "in the ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(QueryAndGroup_Dilate,", "= NearestNeighbor.apply class Interpolation(Function): @staticmethod def 
forward(ctx, features: torch.Tensor, idx:", "(b, c, n) tensor of the interpolated features \"\"\" assert", "idx2[:self.nsample] idx = idx[:, :, idx2] xyz_trans = xyz.transpose(1, 2).contiguous()", "2).unsqueeze(2) if features is not None: grouped_features = features.unsqueeze(2) if", "pointops_cuda.gathering_forward_cuda(b, c, n, m, features, idx, output) ctx.for_backwards = (idx,", "import Function import torch.nn as nn from metrics.pointops import pointops_cuda", "radius of the balls nsample: int, maximum number of features", "the interpolated features \"\"\" assert features.is_contiguous() assert idx.is_contiguous() assert weight.is_contiguous()", "neighbor xyz: (b, n, 3) coordinates of the features new_xyz:", "pointops_cuda.labelstat_and_ballquery_cuda(b, n, m, radius, nsample, nclass, new_xyz, xyz, label_stat, idx,", "= torch.cat([grouped_xyz, grouped_features], dim=1) # (b, c+3, m, nsample) #", "torch.Tensor, new_xyz: torch.Tensor, label_stat: torch.Tensor): ''' :param ctx: :param radius:", "3) centriods features: (b, c, n) idx: idx of neighbors", "ctx.for_backwards = (distribute_idx, n) return distribute_feature @staticmethod def backward(ctx, grad_distribute_feature:", "# (b, m, nsample) grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1) if features", "new_xyz.is_contiguous() assert label_stat.is_contiguous() b, n, nclass = label_stat.size() m =", "Indexing input: nsample: int32, Number of neighbor xyz: (b, n,", "nearest neighbors \"\"\" assert unknown.is_contiguous() assert known.is_contiguous() b, n, _", "m) features descriptors to be interpolated from idx: (b, n,", "use xyz as a feature!\" new_features = grouped_xyz return grouped_xyz,", "torch.Tensor: ''' :param ctx: :param max_feature: (b, c, n) :param", "__init__(self, radius=None, nsample=32, use_xyz=True): super(QueryAndGroup_Dilate, self).__init__() self.radius, self.nsample, self.use_xyz =", "dim=1) # (b, c+3, m, nsample) else: new_features = grouped_features", "m, nsample, grad_out_data, idx, 
grad_features.data) return grad_features, None grouping =", "= ctx.interpolation_for_backward b, c, n = grad_out.size() grad_features = torch.cuda.FloatTensor(b,", "return None, None furthestsampling = FurthestSampling.apply class Gathering(Function): @staticmethod def", "None: idx = ballquery(self.radius, 2*self.nsample, xyz, new_xyz) else: # idx", "@staticmethod def backward(xyz, a=None): return None, None furthestsampling = FurthestSampling.apply", "n), None ''' distribute_idx, n = ctx.for_backwards b, c, m", "centriods output: idx: (b, m, nsample) \"\"\" if new_xyz is", "torch.nn as nn from metrics.pointops import pointops_cuda import numpy as", "distribute_idx, n = ctx.for_backwards b, c, m = grad_distribute_feature.size() grad_max_feature", "'y=x'. i.e. dist[i,j] = ||x[i,:]-y[j,:]||^2 ''' x_norm = (x **", "KNNQuery.apply class KNNQueryExclude(Function): @staticmethod def forward(ctx, nsample: int, xyz: torch.Tensor,", "nsample) # grouped_idxs = grouping(idxs.unsqueeze(1).float(), idx).squeeze(1).int() # (b, m, nsample)", "nsample=32, use_xyz=True): super(Le_QueryAndGroup_OnlyFeature, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample,", "int32 output: idx: (b, m) \"\"\" assert xyz.is_contiguous() b, n,", "= xyz if idx is None: if self.radius is not", "None: new_xyz = xyz #if idx is None: if self.radius", "2).sum(1).view(1, -1) else: y_t = torch.transpose(x, 0, 1) y_norm =", "interpolated from idx: (b, n, 3) three nearest neighbors of", "grouped_xyz return new_features class Le_QueryAndGroup(nn.Module): \"\"\" Groups with a ball", "m, nclass).zero_() idx = torch.cuda.IntTensor(b, m, nsample).zero_() pointops_cuda.labelstat_and_ballquery_cuda(b, n, m,", "@staticmethod def backward(ctx): return None, None, None knnquery_exclude = KNNQueryExclude.apply", "dist2: (b, n, 3) l2 distance to the three nearest", "np class FurthestSampling(Function): @staticmethod def forward(ctx, xyz, m): \"\"\" input:", "3) output: dist2: (b, n, 3) l2 distance 
to the", "ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup_SameSize, self).__init__() self.radius,", "(b, m) tensor output: (b, c, m) \"\"\" assert features.is_contiguous()", "(b, c, n), None \"\"\" idx, n = ctx.for_backwards b,", "class FeatureDistribute(Function): @staticmethod def forward(ctx, max_xyz: torch.Tensor, xyz: torch.Tensor) ->", "known input: unknown: (b, n, 3), known: (b, m, 3)", "FurthestSampling(Function): @staticmethod def forward(ctx, xyz, m): \"\"\" input: xyz: (b,", "idx: (b, n, 3) index of 3 nearest neighbors \"\"\"", "= radius, nsample, use_xyz def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor", "idx.is_contiguous() b, n, nclass = label_stat.size() m = idx.size(1) new_label_stat", "if features is not None: grouped_features = grouping(features, idx) if", "m, max_xyz, xyz, distribute_idx) return distribute_idx @staticmethod def backward(ctx, a=None):", "features and not use xyz as a feature!\" new_features =", "as a feature!\" new_features = grouped_xyz return new_features class Le_QueryAndGroup(nn.Module):", "nsample) ) \"\"\" if new_xyz is None: new_xyz = xyz", "ctx: :param radius: :param xyz: (b, n, 3) :param new_xyz:", "new_label_stat) return new_label_stat @staticmethod def backward(ctx, a=None): return None, None,", "Mxd matirx Output: dist is a NxM matrix where dist[i,j]", "c, n) pointops_cuda.interpolation_forward_cuda(b, c, m, n, features, idx, weight, output)", "c, n).zero_() grad_distribute_feature_data = grad_distribute_feature.data.contiguous() pointops_cuda.featuregather_backward_cuda(b, n, m, c, grad_distribute_feature_data,", "Gen_QueryAndGroupXYZ(nn.Module): \"\"\" Groups with a ball query of radius parameters:", "ballquery(self.radius, 2*self.nsample, xyz, new_xyz) else: # idx = knnquery_naive(self.nsample, xyz,", "of the features new_xyz: ignored torch features: (b, c, n)", "m * n, 3) - xyz.repeat(1, m, 1).view(b, m *", "idxs] = torch.sort(dist, dim=1) idx[i, :, :] = 
idxs[:, 0:nsample]", "c, n, m, nsample, grad_out_data, idx, grad_features.data) return grad_features, None", "= features.size() _, m, nsample = idx.size() output = torch.cuda.FloatTensor(b,", "torch.sort(dist, dim=2) idx = idxs[:, :, 0:nsample].int() # ''' return", "BxNx3 -> Bx3xN grouped_xyz = grouping(xyz_trans, idx) # (b, 3,", "dist = (new_xyz.repeat(1, 1, n).view(b, m * n, 3) -", "c, n), None ''' distribute_idx, n = ctx.for_backwards b, c,", "square norm between x[i,:] and y[j,:] if y is not", "def forward(ctx, radius: float, xyz: torch.Tensor, new_xyz: torch.Tensor, label_stat: torch.Tensor)", "new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() idx = torch.cuda.IntTensor(b, m, nsample).zero_()", "not given then use 'y=x'. i.e. dist[i,j] = ||x[i,:]-y[j,:]||^2 '''", "super(QueryAndGroup, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz def", "n, 3) # xyz_repeat = xyz.repeat(1, m, 1).view(b, m *", "@staticmethod def backward(ctx, grad_out): idx, c, n = ctx.for_backwards b,", "NearestNeighbor(Function): @staticmethod def forward(ctx, unknown: torch.Tensor, known: torch.Tensor) -> Tuple[torch.Tensor,", "3 nearest neighbors \"\"\" assert unknown.is_contiguous() assert known.is_contiguous() b, n,", "a ball query of radius parameters: radius: float32, Radius of", "grad_distribute_feature.data.contiguous() pointops_cuda.featuregather_backward_cuda(b, n, m, c, grad_distribute_feature_data, distribute_idx, grad_max_feature.data) return grad_max_feature,", "nsample) grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1) if features is not None:", "radius, nsample, use_xyz #def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor =", "def pairwise_distances(x, y=None): ''' Input: x is a Nxd matrix", "xyz: torch.Tensor, new_xyz: torch.Tensor = None) -> Tuple[torch.Tensor]: \"\"\" KNN", "class Interpolation(Function): @staticmethod def forward(ctx, features: torch.Tensor, idx: torch.Tensor, weight:", "m, c, 
grad_distribute_feature_data, distribute_idx, grad_max_feature.data) return grad_max_feature, None featuregather =", "[_, idxs] = torch.sort(dist, dim=2) idx = idxs[:, :, 0:nsample].int()", "balls xyz: torch.Tensor, (b, n, 3) xyz coordinates of the", "nclass) idx: (b, m, nsample) ''' assert xyz.is_contiguous() assert new_xyz.is_contiguous()", "features new_xyz: ignored torch features: (b, c, n) descriptors of", ":param idx: (b, m, nsample) :return: new_label_stat: (b, m, nclass)", "''' :param ctx: :param nsample: :param label_stat: (b, n, nclass)", "c, max_feature, distribute_idx, distribute_feature) ctx.for_backwards = (distribute_idx, n) return distribute_feature", "idxs[:, :, 1:nsample+1].int() # ''' return idx @staticmethod def backward(ctx):", "idx) # (b, c, m, nsample) if self.use_xyz: #new_features =", "idx = torch.cuda.IntTensor(b, m, nsample).zero_() pointops_cuda.ballquery_cuda(b, n, m, radius, nsample,", "return None, None, None, None, None labelstat_and_ballquery = LabelStatAndBallQuery.apply def", "from idx: (b, n, 3) three nearest neighbors of the", "max_feature, distribute_idx, distribute_feature) ctx.for_backwards = (distribute_idx, n) return distribute_feature @staticmethod", "return grouped_xyz class Le_QueryAndGroup_OnlyFeature(nn.Module): \"\"\" Groups with a ball query", "typing import Tuple import torch from torch.autograd import Function import", "c, n, m, nsample, features, idx, output) ctx.for_backwards = (idx,", "= grouped_xyz return grouped_xyz, new_features class QueryAndGroup(nn.Module): \"\"\" Groups with", "= grad_out.data.contiguous() pointops_cuda.grouping_backward_cuda(b, c, n, m, nsample, grad_out_data, idx, grad_features.data)", "as np class FurthestSampling(Function): @staticmethod def forward(ctx, xyz, m): \"\"\"", "ctx.interpolation_for_backward b, c, n = grad_out.size() grad_features = torch.cuda.FloatTensor(b, c,", "None grouping = Grouping.apply class GroupingInt(Function): @staticmethod def forward(ctx, features:", "m, 
nsample).zero_() dist2 = torch.cuda.FloatTensor(b, m, nsample).zero_() pointops_cuda.knnquery_cuda(b, n, m,", "= features.size() m = idx.size(1) output = torch.cuda.FloatTensor(b, c, m)", "@staticmethod def backward(ctx, grad_out: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: \"\"\"", "n) return output @staticmethod def backward(ctx, grad_out: torch.Tensor) -> Tuple[torch.Tensor,", "the balls xyz: torch.Tensor, (b, n, 3) xyz coordinates of", "= torch.cuda.IntTensor(b, n, 3) pointops_cuda.nearestneighbor_cuda(b, n, m, unknown, known, dist2,", "None, None, None, None labelstat_and_ballquery = LabelStatAndBallQuery.apply def pairwise_distances(x, y=None):", "grad_features = torch.cuda.FloatTensor(b, c, n).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.gathering_backward_cuda(b, c,", "idx = torch.cuda.IntTensor(b, m) temp = torch.cuda.FloatTensor(b, n).fill_(1e10) pointops_cuda.furthestsampling_cuda(b, n,", "nsample) xyz_trans = xyz.transpose(1, 2).contiguous() # BxNx3 -> Bx3xN grouped_xyz", "m, nsample) containing the indicies of features to group with", "LabelStatBallRange.apply class LabelStatIdx(Function): @staticmethod def forward(ctx, nsample: int, label_stat: torch.Tensor,", "bool = True): super(GroupAll, self).__init__() self.use_xyz = use_xyz def forward(self,", "= (new_xyz.repeat(1, 1, n).view(b, m * n, 3) - xyz.repeat(1,", "self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz def forward(self, xyz:", "self.use_xyz = radius, nsample, use_xyz def forward(self, xyz: torch.Tensor, new_xyz:", "m, 1).view(b, m * n, 3)).pow(2).sum(dim=2).view(b, m, n) [_, idxs]", "m * n, 3)).pow(2).sum(dim=2).view(b, m, n) [_, idxs] = torch.sort(dist,", "(distribute_idx, n) return distribute_feature @staticmethod def backward(ctx, grad_distribute_feature: torch.Tensor): '''", "return None, None grouping_int = GroupingInt.apply class BallQuery(Function): @staticmethod def", "coordinates of the features new_xyz: (b, m, 3) centriods 
features:", ":param radius: :param nsample: :param xyz: (b, n, 3) :param", "new_label_stat: (b, m, nclass) ''' assert xyz.is_contiguous() assert new_xyz.is_contiguous() assert", "dim=2) idx = idxs[:, :, 1:nsample+1].int() # ''' return idx", "assert idx.is_contiguous() b, n, nclass = label_stat.size() m = idx.size(1)", "# dist = (new_xyz_repeat - xyz_repeat).pow(2).sum(dim=2).view(b, m, n) dist =", "new_features = grouped_xyz return new_features class Le_QueryAndGroup(nn.Module): \"\"\" Groups with", "= GroupingInt.apply class BallQuery(Function): @staticmethod def forward(ctx, radius: float, nsample:", "#def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor = None, features: torch.Tensor", "new_xyz.is_contiguous() b, n, _ = xyz.size() m = new_xyz.size(1) idx", "# idx = knnquery_naive(self.nsample, xyz, new_xyz) # (b, m, nsample)", "knnquery_naive = KNNQueryNaive.apply class KNNQuery(Function): @staticmethod def forward(ctx, nsample: int,", "2*self.nsample, xyz, new_xyz) else: # idx = knnquery_naive(self.nsample, xyz, new_xyz)", "grad_features.data) return grad_features, None, None interpolation = Interpolation.apply class Grouping(Function):", "(b, m, 3) :return: distribute_idx: (b, m) \"\"\" assert max_xyz.is_contiguous()", "grad_out.size() grad_features = torch.cuda.FloatTensor(b, c, m).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.interpolation_backward_cuda(b,", "weights output: (b, c, n) tensor of the interpolated features", "n, 3) three nearest neighbors of the target features in", "= new_xyz.size() n = xyz.size(1) idx = torch.cuda.IntTensor(b, m, nsample).zero_()", "idx of neighbors # idxs: (b, n) output: new_features: (b,", "Le_QueryAndGroup(nn.Module): \"\"\" Groups with a ball query of radius parameters:", "i in range(2*self.nsample)]) np.random.shuffle(idx2) idx2 = idx2[:self.nsample] idx = idx[:,", "is None: if self.radius is not None: idx = ballquery(self.radius,", "query output: (b, m, nsample) tensor with the indicies of", 
"None, None labelstat_ballrange = LabelStatBallRange.apply class LabelStatIdx(Function): @staticmethod def forward(ctx,", "class KNNQuery(Function): @staticmethod def forward(ctx, nsample: int, xyz: torch.Tensor, new_xyz:", "(idx, c, n) return output @staticmethod def backward(ctx, grad_out): idx,", "class GroupAll(nn.Module): \"\"\" Groups all features \"\"\" def __init__(self, use_xyz:", "self.nsample, xyz, new_xyz) else: # idx = knnquery_naive(self.nsample, xyz, new_xyz)", "\"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(QueryAndGroup, self).__init__() self.radius, self.nsample,", "new_xyz = xyz assert xyz.is_contiguous() assert new_xyz.is_contiguous() b, m, _", "c, n, m, nsample, features, idx, output) return output @staticmethod", "n) descriptors of the features output: new_features: (b, c+3, 1,", "3) :param xyz: (b, m, 3) :return: distribute_idx: (b, m)", "int, label_stat: torch.Tensor, idx: torch.Tensor) -> torch.Tensor: ''' :param ctx:", "3) l2 distance to the three nearest neighbors idx: (b,", "n = max_feature.size() m = distribute_idx.size(1) distribute_feature = torch.cuda.FloatTensor(b, c,", "grad_features: (b, c, m), None, None \"\"\" idx, weight, m", "weight linear interpolation on 3 features input: features: (b, c,", "self.use_xyz: #new_features = torch.cat([grouped_xyz, grouped_features], dim=1) # (b, c+3, m,", "m, 3) centriods features: (b, c, n) idx: idx of", "dist is a NxM matrix where dist[i,j] is the square", "n = xyz.size(1) idx = torch.cuda.IntTensor(b, m, nsample).zero_() dist2 =", "_ = new_xyz.size() n = xyz.size(1) ''' idx = torch.zeros(b,", "self.radius is not None: idx = ballquery(self.radius, self.nsample, xyz, new_xyz)", "@staticmethod def forward(ctx, radius: float, xyz: torch.Tensor, new_xyz: torch.Tensor, label_stat:", "@staticmethod def forward(ctx, max_feature: torch.Tensor, distribute_idx: torch.Tensor) -> torch.Tensor: '''", "grouped_idxs = grouping(idxs.unsqueeze(1).float(), idx).squeeze(1).int() # (b, 
m, nsample) #grouped_xyz -=", "n, 3), known: (b, m, 3) output: dist2: (b, n,", "that form the query balls \"\"\" assert xyz.is_contiguous() assert new_xyz.is_contiguous()", "new_features = torch.cat([grouped_xyz, grouped_features], dim=1) # (b, c+3, 1, n)", "grouped_features], dim=1) # (b, c+3, m, nsample) # le new_features", "a=None): return None, None grouping_int = GroupingInt.apply class BallQuery(Function): @staticmethod", "is not None: idx = ballquery(self.radius, self.nsample, xyz, new_xyz) else:", "new_xyz: torch.Tensor, features: torch.Tensor = None) -> Tuple[torch.Tensor]: \"\"\" input:", "all features \"\"\" def __init__(self, use_xyz: bool = True): super(GroupAll,", "grad_out: (b, c, m, nsample) output: (b, c, n), None", "input: xyz: (b, n, 3) coordinates of the features new_xyz:", "new_features = grouped_xyz return new_features class GroupAll(nn.Module): \"\"\" Groups all", "n, 3) coordinates of the features new_xyz: (b, m, 3)", "grad_features, None gathering = Gathering.apply class NearestNeighbor(Function): @staticmethod def forward(ctx,", "self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz def forward(self,", "idx: torch.Tensor, weight: torch.Tensor) -> torch.Tensor: \"\"\" Performs weight linear", "features new_xyz: (b, n, 3) centriods features: (b, c, n)", ":]) [_, idxs] = torch.sort(dist, dim=1) idx[i, :, :] =", "number of features in the balls xyz: torch.Tensor, (b, n,", "n) :param distribute_idx: (b, m) :return: distribute_feature: (b, c, m)", "torch.cuda.IntTensor(b, m, nsample).zero_() pointops_cuda.ballquery_cuda(b, n, m, radius, nsample, new_xyz, xyz,", "KNNQueryNaive(Function): @staticmethod def forward(ctx, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor", "= grouped_features else: assert self.use_xyz, \"Cannot have not features and", "assert features.is_contiguous() assert idx.is_contiguous() assert weight.is_contiguous() b, c, m =", "self).__init__() self.use_xyz = use_xyz def forward(self, xyz: 
torch.Tensor, new_xyz: torch.Tensor,", "nsample) #grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1) if features is not None:", "= unknown.size() m = known.size(1) dist2 = torch.cuda.FloatTensor(b, n, 3)", "m, nsample) return grouped_xyz class Le_QueryAndGroup_OnlyFeature(nn.Module): \"\"\" Groups with a", "(b, c, m), None, None \"\"\" idx, weight, m =", "Input: x is a Nxd matrix y is an optional", "c, m = features.size() n = idx.size(1) ctx.interpolation_for_backward = (idx,", ":, idx2] xyz_trans = xyz.transpose(1, 2).contiguous() grouped_xyz = grouping(xyz_trans, idx)", "1) y_norm = (y ** 2).sum(1).view(1, -1) else: y_t =", "grouped_features = grouping(features, idx) # (b, c, m, nsample) if", "radius=None, nsample=32, use_xyz=True): super(QueryAndGroup, self).__init__() self.radius, self.nsample, self.use_xyz = radius,", "xyz, distribute_idx) return distribute_idx @staticmethod def backward(ctx, a=None): return None,", "new_xyz_repeat = new_xyz.repeat(1, 1, n).view(b, m * n, 3) #", "xyz: torch.Tensor, new_xyz: torch.Tensor = None) -> torch.Tensor: \"\"\" input:", "# (b, c+3, 1, n) else: new_features = grouped_features else:", "distribute_idx @staticmethod def backward(ctx, a=None): return None, None featuredistribute =", "None) -> Tuple[torch.Tensor]: \"\"\" KNN Indexing input: nsample: int32, Number", ":, 0:nsample].int() # ''' return idx @staticmethod def backward(ctx): return", "c, n, m, features, idx, output) ctx.for_backwards = (idx, c,", "return grad_features, None gathering = Gathering.apply class NearestNeighbor(Function): @staticmethod def", "idx @staticmethod def backward(xyz, a=None): return None, None furthestsampling =", "labelstat_and_ballquery = LabelStatAndBallQuery.apply def pairwise_distances(x, y=None): ''' Input: x is", "gather in the ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True):", "(b, n, 3) :param xyz: (b, m, 3) :return: distribute_idx:", "@staticmethod def backward(ctx, grad_distribute_feature: torch.Tensor): ''' 
:param ctx: :param grad_distribute_feature:", "m: int32 output: idx: (b, m) \"\"\" assert xyz.is_contiguous() b,", "xyz as a feature!\" new_features = grouped_xyz return new_features class", "None grouping_int = GroupingInt.apply class BallQuery(Function): @staticmethod def forward(ctx, radius:", "nsample) else: new_features = grouped_features else: assert self.use_xyz, \"Cannot have", "features.is_contiguous() assert idx.is_contiguous() b, c, n = features.size() _, m,", "c, m, nsample) else: new_features = grouped_features else: assert self.use_xyz,", "torch.cuda.FloatTensor(b, c, m).zero_() pointops_cuda.featuregather_forward_cuda(b, n, m, c, max_feature, distribute_idx, distribute_feature)", "torch.Tensor): ''' :param ctx: :param radius: :param nsample: :param xyz:", "features that form the query balls \"\"\" assert xyz.is_contiguous() assert", "= idxs[:, :, 1:nsample+1].int() # ''' return idx @staticmethod def", "torch.Tensor = None, idx: torch.Tensor = None) -> torch.Tensor: \"\"\"", "n).view(b, m * n, 3) # xyz_repeat = xyz.repeat(1, m,", "torch.cat([grouped_xyz, grouped_features], dim=1) # (b, c+3, m, nsample) # le", "distribute_idx, grad_max_feature.data) return grad_max_feature, None featuregather = FeatureGather.apply class LabelStatBallRange(Function):", "label_stat: torch.Tensor, idx: torch.Tensor) -> torch.Tensor: ''' :param ctx: :param", "(b, m) \"\"\" assert xyz.is_contiguous() b, n, _ = xyz.size()", "grad_distribute_feature_data = grad_distribute_feature.data.contiguous() pointops_cuda.featuregather_backward_cuda(b, n, m, c, grad_distribute_feature_data, distribute_idx, grad_max_feature.data)", "output: dist2: (b, n, 3) l2 distance to the three", "interpolation = Interpolation.apply class Grouping(Function): @staticmethod def forward(ctx, features: torch.Tensor,", "y=None): ''' Input: x is a Nxd matrix y is", "return distribute_feature @staticmethod def backward(ctx, grad_distribute_feature: torch.Tensor): ''' :param ctx:", "= idx.size(1) output = 
torch.cuda.FloatTensor(b, c, m) pointops_cuda.gathering_forward_cuda(b, c, n,", "2).sum(1).view(-1, 1) if y is not None: y_t = torch.transpose(y,", "else: idx = knnquery(self.nsample, xyz, new_xyz) # (b, m, nsample)", "= grad_out.size() grad_features = torch.cuda.FloatTensor(b, c, m).zero_() grad_out_data = grad_out.data.contiguous()", "max_feature: torch.Tensor, distribute_idx: torch.Tensor) -> torch.Tensor: ''' :param ctx: :param", ":, 1:nsample+1].int() # ''' return idx @staticmethod def backward(ctx): return", "the features new_xyz: ignored torch features: (b, c, n) descriptors", "of the features that form the query balls \"\"\" assert", "def backward(ctx, a=None, b=None): return None, None, None, None, None", "= features.unsqueeze(2) if self.use_xyz: new_features = torch.cat([grouped_xyz, grouped_features], dim=1) #", "m, nsample) # grouped_idxs = grouping(idxs.unsqueeze(1).float(), idx).squeeze(1).int() # (b, m,", "a=None): return None, None furthestsampling = FurthestSampling.apply class Gathering(Function): @staticmethod", "= ctx.for_backwards b, c, m = grad_distribute_feature.size() grad_max_feature = torch.cuda.FloatTensor(b,", "xyz: (b, n, 3) coordinates of the features new_xyz: ignored", "torch.Tensor, idx: torch.Tensor) -> torch.Tensor: ''' :param ctx: :param nsample:", "2).contiguous() #grouped_xyz = grouping(xyz_trans, idx) # (b, 3, m, nsample)", "torch.cuda.IntTensor(b, m) temp = torch.cuda.FloatTensor(b, n).fill_(1e10) pointops_cuda.furthestsampling_cuda(b, n, m, xyz,", "features weight: (b, n, 3) weights output: (b, c, n)", "dist = pairwise_distances(new_xyz[i, :, :], xyz[i, :, :]) [_, idxs]", "= (new_xyz_repeat - xyz_repeat).pow(2).sum(dim=2).view(b, m, n) dist = (new_xyz.repeat(1, 1,", "-> torch.Tensor: \"\"\" input: features: (b, c, n), idx :", "n, m, unknown, known, dist2, idx) return torch.sqrt(dist2), idx @staticmethod", "grad_out): idx, c, n = ctx.for_backwards b, m = idx.size()", "m = new_xyz.size(1) idx = torch.cuda.IntTensor(b, m, 
nsample).zero_() pointops_cuda.ballquery_cuda(b, n,", "grouping(idxs.unsqueeze(1).float(), idx).squeeze(1).int() # (b, m, nsample) #grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1)", "torch.Tensor, label_stat: torch.Tensor): ''' :param ctx: :param radius: :param nsample:", "return None, None, None, None labelstat_ballrange = LabelStatBallRange.apply class LabelStatIdx(Function):", "torch.clamp(dist, 0.0, np.inf) class KNNQueryNaive(Function): @staticmethod def forward(ctx, nsample: int,", "new_features class QueryAndGroup_Dilate(nn.Module): \"\"\" Groups with a ball query of", "= features.size() _, m, nsample = idx.size() output = torch.cuda.LongTensor(b,", "# idxs: (b, n) output: new_features: (b, c+3, m, nsample)", "3) :param label_stat: (b, n, nclass) :return: new_label_stat: (b, m,", "= idx2[:self.nsample] idx = idx[:, :, idx2] xyz_trans = xyz.transpose(1,", "use_xyz=True): super(Le_QueryAndGroup, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz", "the features new_xyz: (b, n, 3) centriods features: (b, c,", "the ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(QueryAndGroup, self).__init__()", "m, nsample) xyz_trans = xyz.transpose(1, 2).contiguous() grouped_xyz = grouping(xyz_trans, idx)", "(b, 3, m, nsample) # grouped_idxs = grouping(idxs.unsqueeze(1).float(), idx).squeeze(1).int() #", "(b, m, 3) output: dist2: (b, n, 3) l2 distance", "3) # xyz_repeat = xyz.repeat(1, m, 1).view(b, m * n,", "float32, Radius of ball nsample: int32, Maximum number of features", "super(Le_QueryAndGroup_OnlyFeature, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz def", "idx) return idx @staticmethod def backward(ctx, a=None): return None, None,", "neighbors of unknown in known input: unknown: (b, n, 3),", "(b, c+3, m, nsample) # le new_features = grouped_features #", "# (b, m, nsample) idx = knnquery(2*self.nsample, xyz, new_xyz) #", "xyz: (b, n, 3) and n > m, m: int32", 
"new_xyz = xyz if idx is None: if self.radius is", ":param xyz: (b, m, 3) :return: distribute_idx: (b, m) \"\"\"", "# (b, m, nsample) xyz_trans = xyz.transpose(1, 2).contiguous() grouped_xyz =", "use_xyz: bool = True): super(GroupAll, self).__init__() self.use_xyz = use_xyz def", "radius: :param xyz: (b, n, 3) :param new_xyz: (b, m,", "y is not given then use 'y=x'. i.e. dist[i,j] =", "query balls \"\"\" assert xyz.is_contiguous() assert new_xyz.is_contiguous() b, n, _", "weight, m = ctx.interpolation_for_backward b, c, n = grad_out.size() grad_features", "xyz.transpose(1, 2).contiguous() grouped_xyz = grouping(xyz_trans, idx) # (b, 3, m,", "(b, m, nsample) idx = knnquery(2*self.nsample, xyz, new_xyz) # (b,", "dim=1) idx[i, :, :] = idxs[:, 0:nsample] ''' # '''", "m = ctx.interpolation_for_backward b, c, n = grad_out.size() grad_features =", "features: torch.Tensor = None, idx: torch.Tensor = None) -> torch.Tensor:", "dist = x_norm + y_norm - 2.0 * torch.mm(x, y_t)", "return idx @staticmethod def backward(ctx, a=None): return None, None, None", "return None, None nearestneighbor = NearestNeighbor.apply class Interpolation(Function): @staticmethod def", "use xyz as a feature!\" new_features = grouped_xyz return new_features", "c+3, m, nsample) else: new_features = grouped_features else: assert self.use_xyz,", "m, nsample).zero_() pointops_cuda.ballquery_cuda(b, n, m, radius, nsample, new_xyz, xyz, idx)", "on 3 features input: features: (b, c, m) features descriptors", "n > m, m: int32 output: idx: (b, m) \"\"\"", "weight: torch.Tensor) -> torch.Tensor: \"\"\" Performs weight linear interpolation on", "max_xyz: (b, n, 3) :param xyz: (b, m, 3) :return:", "= (x ** 2).sum(1).view(-1, 1) if y is not None:", "n, m, nsample, features, idx, output) ctx.for_backwards = (idx, n)", "@staticmethod def backward(ctx, a=None): return None, None, None knnquery =", "c, n), idx : (b, m) tensor output: (b, c,", "matirx Output: dist is a NxM matrix where dist[i,j] is", "idx, output) 
return output @staticmethod def backward(ctx, a=None): return None,", "nsample).zero_() pointops_cuda.labelstat_and_ballquery_cuda(b, n, m, radius, nsample, nclass, new_xyz, xyz, label_stat,", "torch.Tensor, weight: torch.Tensor) -> torch.Tensor: \"\"\" Performs weight linear interpolation", "None, None labelstat_idx = LabelStatIdx.apply class LabelStatAndBallQuery(Function): @staticmethod def forward(ctx,", "= max_feature.size() m = distribute_idx.size(1) distribute_feature = torch.cuda.FloatTensor(b, c, m).zero_()", "grouped_features], dim=1) # (b, c+3, 1, n) else: new_features =", "b, c, m = grad_distribute_feature.size() grad_max_feature = torch.cuda.FloatTensor(b, c, n).zero_()", "output) return output @staticmethod def backward(ctx, a=None): return None, None", "def forward(ctx, unknown: torch.Tensor, known: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: \"\"\"", "torch.mm(x, y_t) import numpy as np return torch.clamp(dist, 0.0, np.inf)", "b, m, _ = new_xyz.size() n = xyz.size(1) ''' idx", "b=None): return None, None nearestneighbor = NearestNeighbor.apply class Interpolation(Function): @staticmethod", "output = torch.cuda.LongTensor(b, c, m, nsample) pointops_cuda.grouping_int_forward_cuda(b, c, n, m,", "new_xyz, xyz, idx) return idx @staticmethod def backward(ctx, a=None): return", "- 2.0 * torch.mm(x, y_t) import numpy as np return", "idx, weight, output) return output @staticmethod def backward(ctx, grad_out: torch.Tensor)", "torch.cuda.IntTensor(b, m, nclass).zero_() pointops_cuda.labelstat_ballrange_cuda(b, n, m, radius, nclass, new_xyz, xyz,", "xyz, label_stat, new_label_stat) return new_label_stat @staticmethod def backward(ctx, a=None): return", "n, 3) pointops_cuda.nearestneighbor_cuda(b, n, m, unknown, known, dist2, idx) return", "assert idx.is_contiguous() b, c, n = features.size() m = idx.size(1)", "nsample: :param label_stat: (b, n, nclass) :param idx: (b, m,", "and not use xyz as a feature!\" new_features = grouped_xyz", "backward(ctx, 
a=None): return None, None featuredistribute = FeatureDistribute.apply class FeatureGather(Function):", "return idx @staticmethod def backward(xyz, a=None): return None, None furthestsampling", "m, nsample, features, idx, output) return output @staticmethod def backward(ctx,", "return output @staticmethod def backward(ctx, a=None): return None, None grouping_int", "not None: grouped_features = grouping(features, idx) # (b, c, m,", "idx @staticmethod def backward(ctx): return None, None, None knnquery_exclude =", "import numpy as np class FurthestSampling(Function): @staticmethod def forward(ctx, xyz,", "3) centriods output: new_features: (b, m, nsample) \"\"\" if new_xyz", "torch.Tensor) -> torch.Tensor: \"\"\" input: features: (b, c, n), idx", "features, idx, output) ctx.for_backwards = (idx, n) return output @staticmethod", "the ball query output: (b, m, nsample) tensor with the", "None, None ballquery = BallQuery.apply class FeatureDistribute(Function): @staticmethod def forward(ctx,", "the features new_xyz: (b, m, 3) centriods output: idx: (b,", "_ = new_xyz.size() n = xyz.size(1) idx = torch.cuda.IntTensor(b, m,", "grad_out: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: \"\"\" input: grad_out: (b,", "grad_out_data, idx, grad_features.data) return grad_features, None grouping = Grouping.apply class", "None: new_xyz = xyz assert xyz.is_contiguous() assert new_xyz.is_contiguous() b, m,", "int32, Number of neighbor xyz: (b, n, 3) coordinates of", "featuregather = FeatureGather.apply class LabelStatBallRange(Function): @staticmethod def forward(ctx, radius: float,", "new_xyz, xyz, label_stat, idx, new_label_stat) return new_label_stat, idx @staticmethod def", "not features and not use xyz as a feature!\" new_features", "@staticmethod def backward(ctx, a=None): return None, None, None labelstat_idx =", "ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Gen_QueryAndGroupXYZ, self).__init__() self.radius,", "n, 3) weights 
output: (b, c, n) tensor of the", "def forward(ctx, features: torch.Tensor, idx: torch.Tensor) -> torch.Tensor: \"\"\" input:", "n = grad_out.size() grad_features = torch.cuda.FloatTensor(b, c, m).zero_() grad_out_data =", "torch.Tensor = None) -> Tuple[torch.Tensor]: \"\"\" KNN Indexing input: nsample:", "None, None, None, None ballquery = BallQuery.apply class FeatureDistribute(Function): @staticmethod", "= None) -> Tuple[torch.Tensor]: \"\"\" KNN Indexing input: nsample: int32,", "assert xyz.is_contiguous() b, n, _ = max_xyz.size() m = xyz.size(1)", "3, m, nsample) return grouped_xyz class Le_QueryAndGroup_OnlyFeature(nn.Module): \"\"\" Groups with", "idx: (b, m, nsample) ''' assert xyz.is_contiguous() assert new_xyz.is_contiguous() assert", "None, None, None ballquery = BallQuery.apply class FeatureDistribute(Function): @staticmethod def", "nclass) :param idx: (b, m, nsample) :return: new_label_stat: (b, m,", "if new_xyz is None: new_xyz = xyz #if idx is", ":return: distribute_idx: (b, m) \"\"\" assert max_xyz.is_contiguous() assert xyz.is_contiguous() b,", "torch.cuda.FloatTensor(b, c, n).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.grouping_backward_cuda(b, c, n, m,", "None: grouped_features = grouping(features, idx) # (b, c, m, nsample)", "= np.array([i for i in range(2*self.nsample)]) np.random.shuffle(idx2) idx2 = idx2[:self.nsample]", "features.size() n = idx.size(1) ctx.interpolation_for_backward = (idx, weight, m) output", "None: idx = ballquery(self.radius, self.nsample, xyz, new_xyz) else: # idx", "is not None: grouped_features = grouping(features, idx) # (b, c,", "output = torch.cuda.FloatTensor(b, c, m) pointops_cuda.gathering_forward_cuda(b, c, n, m, features,", "n) return output @staticmethod def backward(ctx, grad_out): idx, c, n", "torch.Tensor, xyz: torch.Tensor) -> torch.Tensor: \"\"\" :param ctx: :param max_xyz:", "xyz, new_xyz) # (b, m, nsample) idx = knnquery(2*self.nsample, xyz,", "torch.Tensor]: \"\"\" input: grad_out: 
(b, c, m, nsample) output: (b,", "output @staticmethod def backward(ctx, grad_out: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:", "None featuregather = FeatureGather.apply class LabelStatBallRange(Function): @staticmethod def forward(ctx, radius:", "\"\"\" assert unknown.is_contiguous() assert known.is_contiguous() b, n, _ = unknown.size()", "= new_xyz.size(1) new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() idx = torch.cuda.IntTensor(b,", "0:nsample].int() # ''' return idx @staticmethod def backward(ctx): return None,", "xyz.size() m = new_xyz.size(1) idx = torch.cuda.IntTensor(b, m, nsample).zero_() pointops_cuda.ballquery_cuda(b,", "nsample=32, use_xyz=True): super(Le_QueryAndGroup, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample,", "backward(ctx, grad_out: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: \"\"\" input: grad_out: (b,", "@staticmethod def forward(ctx, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor =", "= LabelStatAndBallQuery.apply def pairwise_distances(x, y=None): ''' Input: x is a", "None knnquery = KNNQuery.apply class KNNQueryExclude(Function): @staticmethod def forward(ctx, nsample:", "the ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup_SameSize, self).__init__()", "= KNNQueryNaive.apply class KNNQuery(Function): @staticmethod def forward(ctx, nsample: int, xyz:", "# (b, c+3, m, nsample) # le new_features = grouped_features", "new_xyz.transpose(1, 2).unsqueeze(-1) if features is not None: grouped_features = grouping(features,", "xyz, m): \"\"\" input: xyz: (b, n, 3) and n", "= ctx.for_backwards b, c, m, nsample = grad_out.size() grad_features =", "(idx, weight, m) output = torch.cuda.FloatTensor(b, c, n) pointops_cuda.interpolation_forward_cuda(b, c,", "# (b, m, nsample) idx = knnquery(self.nsample, xyz, new_xyz) #", "\"\"\" input: xyz: (b, n, 3) and n > m,", "None, None grouping_int = GroupingInt.apply class 
BallQuery(Function): @staticmethod def forward(ctx,", "n, m, radius, nclass, new_xyz, xyz, label_stat, new_label_stat) return new_label_stat", "grad_out_data, idx, grad_features.data) return grad_features, None gathering = Gathering.apply class", "new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() pointops_cuda.labelstat_idx_cuda(b, n, m, nsample, nclass,", "nclass).zero_() pointops_cuda.labelstat_idx_cuda(b, n, m, nsample, nclass, label_stat, idx, new_label_stat) return", "1, n).view(b, m * n, 3) # xyz_repeat = xyz.repeat(1,", "feature!\" new_features = grouped_xyz return grouped_xyz, new_features class QueryAndGroup(nn.Module): \"\"\"", "a=None): return None, None, None, None ballquery = BallQuery.apply class", "m, m: int32 output: idx: (b, m) \"\"\" assert xyz.is_contiguous()", "def forward(ctx, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor = None)", "= use_xyz def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor, features: torch.Tensor", "xyz, new_xyz) # (b, m, nsample) idx = knnquery(self.nsample, xyz,", "tensor \"\"\" grouped_xyz = xyz.transpose(1, 2).unsqueeze(2) if features is not", "(b, m) :return: distribute_feature: (b, c, m) ''' assert max_feature.is_contiguous()", "features.size() m = idx.size(1) output = torch.cuda.FloatTensor(b, c, m) pointops_cuda.gathering_forward_cuda(b,", "= grad_out.size() grad_features = torch.cuda.FloatTensor(b, c, n).zero_() grad_out_data = grad_out.data.contiguous()", "= grouped_xyz return new_features class QueryAndGroup_Dilate(nn.Module): \"\"\" Groups with a", "pointops_cuda.interpolation_backward_cuda(b, c, n, m, grad_out_data, idx, weight, grad_features.data) return grad_features,", "0, 1) y_norm = (y ** 2).sum(1).view(1, -1) else: y_t", "nearest neighbors of the target features in features weight: (b,", "FeatureGather(Function): @staticmethod def forward(ctx, max_feature: torch.Tensor, distribute_idx: torch.Tensor) -> torch.Tensor:", "features is not None: grouped_features = grouping(features, 
idx) if self.use_xyz:", "= ctx.for_backwards b, m = idx.size() grad_features = torch.cuda.FloatTensor(b, c,", "grad_out.data.contiguous() pointops_cuda.gathering_backward_cuda(b, c, n, m, grad_out_data, idx, grad_features.data) return grad_features,", "None, None knnquery_exclude = KNNQueryExclude.apply class Le_QueryAndGroup_SameSize(nn.Module): \"\"\" Groups with", "if y is not None: y_t = torch.transpose(y, 0, 1)", "features is not None: grouped_features = grouping(features, idx) # (b,", "torch.Tensor = None, idx: torch.Tensor = None) -> torch.Tensor: def", "m, features, idx, output) ctx.for_backwards = (idx, c, n) return", "= (idx, c, n) return output @staticmethod def backward(ctx, grad_out):", "norm between x[i,:] and y[j,:] if y is not given", "forward(ctx, features: torch.Tensor, idx: torch.Tensor, weight: torch.Tensor) -> torch.Tensor: \"\"\"", "m, n, features, idx, weight, output) return output @staticmethod def", "c, m = grad_distribute_feature.size() grad_max_feature = torch.cuda.FloatTensor(b, c, n).zero_() grad_distribute_feature_data", "Number of neighbor xyz: (b, n, 3) coordinates of the", "else: assert self.use_xyz, \"Cannot have not features and not use", "nsample) \"\"\" if new_xyz is None: new_xyz = xyz b,", "grouped_features # (b, c, m, nsample) else: new_features = grouped_features", "distribute_idx, distribute_feature) ctx.for_backwards = (distribute_idx, n) return distribute_feature @staticmethod def", "= torch.cuda.FloatTensor(b, c, m) pointops_cuda.gathering_forward_cuda(b, c, n, m, features, idx,", "new_xyz is None: new_xyz = xyz b, m, _ =", "radius=None, nsample=32, use_xyz=True): super(QueryAndGroup_Dilate, self).__init__() self.radius, self.nsample, self.use_xyz = radius,", "xyz: (b, m, 3) :return: distribute_idx: (b, m) \"\"\" assert", "c, n) tensor of the interpolated features \"\"\" assert features.is_contiguous()", "is not None: idx = ballquery(self.radius, 2*self.nsample, xyz, new_xyz) else:", "m, nclass).zero_() 
pointops_cuda.labelstat_ballrange_cuda(b, n, m, radius, nclass, new_xyz, xyz, label_stat,", "nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor) -> torch.Tensor: \"\"\" input:", "dim=1) # (b, c+3, 1, n) else: new_features = grouped_features", "idx: idx of neighbors # idxs: (b, n) output: new_features:", "assert weight.is_contiguous() b, c, m = features.size() n = idx.size(1)", "3) coordinates of the features new_xyz: (b, n, 3) centriods", "featuredistribute = FeatureDistribute.apply class FeatureGather(Function): @staticmethod def forward(ctx, max_feature: torch.Tensor,", "m, nsample) pointops_cuda.grouping_forward_cuda(b, c, n, m, nsample, features, idx, output)", "float, radius of the balls nsample: int, maximum number of", "of the ball query output: (b, m, nsample) tensor with", "output: (b, c, m) \"\"\" assert features.is_contiguous() assert idx.is_contiguous() b,", "containing the indicies of features to group with output: (b,", "Le_QueryAndGroup_SameSize(nn.Module): \"\"\" Groups with a ball query of radius parameters:", "linear interpolation on 3 features input: features: (b, c, m)", "idx, grad_features.data) return grad_features, None gathering = Gathering.apply class NearestNeighbor(Function):", "ball query of radius parameters: radius: float32, Radius of ball", "neighbors of the target features in features weight: (b, n,", "the features new_xyz: (b, m, 3) centriods features: (b, c,", "nsample) containing the indicies of features to group with output:", "torch.Tensor = None) -> torch.Tensor: def forward(self, xyz: torch.Tensor, new_xyz:", "Tuple[torch.Tensor, torch.Tensor]: \"\"\" input: grad_out: (b, c, m, nsample) output:", "torch.Tensor, new_xyz: torch.Tensor, label_stat: torch.Tensor) -> torch.Tensor: ''' :param ctx:", "torch.Tensor) -> torch.Tensor: ''' :param ctx: :param radius: :param xyz:", "nsample = idx.size() output = torch.cuda.FloatTensor(b, c, m, nsample) pointops_cuda.grouping_forward_cuda(b,", "-1) dist = x_norm + y_norm - 2.0 * 
torch.mm(x,", "features new_xyz: (b, m, 3) centriods output: new_features: (b, m,", "grouped_xyz, new_features class Gen_QueryAndGroupXYZ(nn.Module): \"\"\" Groups with a ball query", "torch.cuda.IntTensor(b, n, 3) pointops_cuda.nearestneighbor_cuda(b, n, m, unknown, known, dist2, idx)", "of the target features in features weight: (b, n, 3)", "-1) else: y_t = torch.transpose(x, 0, 1) y_norm = x_norm.view(1,", "radius: float, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor, label_stat: torch.Tensor):", "new_features = grouped_features else: assert self.use_xyz, \"Cannot have not features", "y is an optional Mxd matirx Output: dist is a", "output @staticmethod def backward(ctx, a=None): return None, None grouping_int =", "if self.use_xyz: new_features = torch.cat([grouped_xyz, grouped_features], dim=1) # (b, c+3,", "ctx.interpolation_for_backward = (idx, weight, m) output = torch.cuda.FloatTensor(b, c, n)", "n, m, nsample, features, idx, output) return output @staticmethod def", "distribute_idx.is_contiguous() b, c, n = max_feature.size() m = distribute_idx.size(1) distribute_feature", "return None, None, None labelstat_idx = LabelStatIdx.apply class LabelStatAndBallQuery(Function): @staticmethod", "features to gather in the ball \"\"\" def __init__(self, radius=None,", "forward(ctx, features: torch.Tensor, idx: torch.Tensor) -> torch.Tensor: \"\"\" input: features:", "is not given then use 'y=x'. i.e. 
dist[i,j] = ||x[i,:]-y[j,:]||^2", "indicies of the features that form the query balls \"\"\"", "label_stat: torch.Tensor) -> torch.Tensor: ''' :param ctx: :param radius: :param", "= known.size(1) dist2 = torch.cuda.FloatTensor(b, n, 3) idx = torch.cuda.IntTensor(b,", "nsample) pointops_cuda.grouping_forward_cuda(b, c, n, m, nsample, features, idx, output) ctx.for_backwards", "Grouping.apply class GroupingInt(Function): @staticmethod def forward(ctx, features: torch.Tensor, idx: torch.Tensor)", "None ''' distribute_idx, n = ctx.for_backwards b, c, m =", "idx @staticmethod def backward(ctx): return None, None, None knnquery_naive =", "@staticmethod def forward(ctx, max_xyz: torch.Tensor, xyz: torch.Tensor) -> torch.Tensor: \"\"\"", "nclass, label_stat, idx, new_label_stat) return new_label_stat @staticmethod def backward(ctx, a=None):", "nearest neighbors of unknown in known input: unknown: (b, n,", "tensor output: (b, c, m) \"\"\" assert features.is_contiguous() assert idx.is_contiguous()", "LabelStatAndBallQuery(Function): @staticmethod def forward(ctx, radius: float, nsample: int, xyz: torch.Tensor,", "idx: torch.Tensor) -> torch.Tensor: ''' :param ctx: :param nsample: :param", "= grouping(features, idx) # (b, c, m, nsample) if self.use_xyz:", "idx) if self.use_xyz: new_features = torch.cat([grouped_xyz, grouped_features], dim=1) # (b,", "torch.cuda.FloatTensor(b, n).fill_(1e10) pointops_cuda.furthestsampling_cuda(b, n, m, xyz, temp, idx) return idx", "input: grad_out: (b, c, m, nsample) output: (b, c, n),", "c, m, nsample) \"\"\" assert features.is_contiguous() assert idx.is_contiguous() b, c,", "c, n).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.gathering_backward_cuda(b, c, n, m, grad_out_data,", "= ballquery(self.radius, self.nsample, xyz, new_xyz) else: # idx = knnquery_naive(self.nsample,", "new_xyz) # (b, m, nsample) idx2 = np.array([i for i", "return new_features class Le_QueryAndGroup(nn.Module): \"\"\" Groups with a ball query", "= 
torch.cuda.FloatTensor(b, c, m).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.interpolation_backward_cuda(b, c, n,", "xyz: torch.Tensor, new_xyz: torch.Tensor) -> torch.Tensor: \"\"\" input: radius: float,", "nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor, label_stat: torch.Tensor): ''' :param", "centriods features: (b, c, n) idx: idx of neighbors #", "distribute_idx: (b, m) :return: distribute_feature: (b, c, m) ''' assert", "m) temp = torch.cuda.FloatTensor(b, n).fill_(1e10) pointops_cuda.furthestsampling_cuda(b, n, m, xyz, temp,", "idx = knnquery_naive(self.nsample, xyz, new_xyz) # (b, m, nsample) idx", "None: idx = ballquery(self.radius, self.nsample, xyz, new_xyz) else: idx =", "torch.zeros(b, m, nsample).int().cuda() for i in range(b): dist = pairwise_distances(new_xyz[i,", "have not features and not use xyz as a feature!\"", "n).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.grouping_backward_cuda(b, c, n, m, nsample, grad_out_data,", "new_label_stat @staticmethod def backward(ctx, a=None): return None, None, None, None", "a feature!\" new_features = grouped_xyz return grouped_xyz, new_features class Gen_QueryAndGroupXYZ(nn.Module):", "coordinates of the features new_xyz: torch.Tensor, (b, m, 3) centers", "assert xyz.is_contiguous() assert new_xyz.is_contiguous() b, m, _ = new_xyz.size() n", "interpolated features \"\"\" assert features.is_contiguous() assert idx.is_contiguous() assert weight.is_contiguous() b,", "backward(ctx, a=None): return None, None grouping_int = GroupingInt.apply class BallQuery(Function):", "m, nsample) \"\"\" if new_xyz is None: new_xyz = xyz", "= xyz #if idx is None: if self.radius is not", "xyz, label_stat, idx, new_label_stat) return new_label_stat, idx @staticmethod def backward(ctx,", "gathering = Gathering.apply class NearestNeighbor(Function): @staticmethod def forward(ctx, unknown: torch.Tensor,", "n).zero_() grad_distribute_feature_data = grad_distribute_feature.data.contiguous() 
pointops_cuda.featuregather_backward_cuda(b, n, m, c, grad_distribute_feature_data, distribute_idx,", "distribute_idx: (b, m) \"\"\" assert max_xyz.is_contiguous() assert xyz.is_contiguous() b, n,", "idx @staticmethod def backward(ctx, a=None, b=None): return None, None, None,", "= torch.cuda.IntTensor(b, m, nsample).zero_() pointops_cuda.labelstat_and_ballquery_cuda(b, n, m, radius, nsample, nclass,", "weight, grad_features.data) return grad_features, None, None interpolation = Interpolation.apply class", "# (b, m, nsample) xyz_trans = xyz.transpose(1, 2).contiguous() # BxNx3", "= torch.transpose(x, 0, 1) y_norm = x_norm.view(1, -1) dist =", "None featuredistribute = FeatureDistribute.apply class FeatureGather(Function): @staticmethod def forward(ctx, max_feature:", "is None: new_xyz = xyz b, m, _ = new_xyz.size()", "\"\"\" idx, weight, m = ctx.interpolation_for_backward b, c, n =", "grouped_idxs = grouping(idxs.unsqueeze(1).float(), idx).squeeze(1).int() # (b, m, nsample) grouped_xyz -=", "nsample).int().cuda() for i in range(b): dist = pairwise_distances(new_xyz[i, :, :],", "n, nclass = label_stat.size() m = new_xyz.size(1) new_label_stat = torch.cuda.IntTensor(b,", "nsample, new_xyz, xyz, idx) return idx @staticmethod def backward(ctx, a=None):", "class NearestNeighbor(Function): @staticmethod def forward(ctx, unknown: torch.Tensor, known: torch.Tensor) ->", "new_features = grouped_xyz return grouped_xyz, new_features class QueryAndGroup(nn.Module): \"\"\" Groups", "-> torch.Tensor: \"\"\" input: radius: float, radius of the balls", "assert new_xyz.is_contiguous() b, n, _ = xyz.size() m = new_xyz.size(1)", ":param label_stat: (b, n, nclass) :return: new_label_stat: (b, m, nclass)", "xyz: torch.Tensor, new_xyz: torch.Tensor, label_stat: torch.Tensor): ''' :param ctx: :param", "(idx, n) return output @staticmethod def backward(ctx, grad_out: torch.Tensor) ->", "(b, c, n), idx : (b, m) tensor output: (b,", "m, nsample) idx = knnquery(2*self.nsample, xyz, new_xyz) 
# (b, m,", "(b, c+3, m, nsample) # grouped_idxs: (b, m, nsample) \"\"\"", "the square norm between x[i,:] and y[j,:] if y is", "m, _ = new_xyz.size() n = xyz.size(1) idx = torch.cuda.IntTensor(b,", "None, None, None knnquery = KNNQuery.apply class KNNQueryExclude(Function): @staticmethod def", "b, c, n = grad_out.size() grad_features = torch.cuda.FloatTensor(b, c, m).zero_()", "(b, c+3, m, nsample) else: new_features = grouped_features else: assert", "new_xyz: (b, m, 3) centriods output: idx: (b, m, nsample)", "new_xyz is None: new_xyz = xyz assert xyz.is_contiguous() assert new_xyz.is_contiguous()", "''' assert xyz.is_contiguous() assert new_xyz.is_contiguous() assert label_stat.is_contiguous() b, n, nclass", "n, m, features, idx, output) ctx.for_backwards = (idx, c, n)", "label_stat: torch.Tensor): ''' :param ctx: :param radius: :param nsample: :param", "n, m, nsample, grad_out_data, idx, grad_features.data) return grad_features, None grouping", "m, 3) output: dist2: (b, n, 3) l2 distance to", "NxM matrix where dist[i,j] is the square norm between x[i,:]", "Nxd matrix y is an optional Mxd matirx Output: dist", "self.use_xyz = use_xyz def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor, features:", "(b, n) output: new_features: (b, c+3, m, nsample) # grouped_idxs:", "torch.Tensor = None) -> Tuple[torch.Tensor]: \"\"\" input: xyz: (b, n,", "number of features to gather in the ball \"\"\" def", "use_xyz=True): super(Le_QueryAndGroup_SameSize, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz", "nclass = label_stat.size() m = idx.size(1) new_label_stat = torch.cuda.IntTensor(b, m,", "known, dist2, idx) return torch.sqrt(dist2), idx @staticmethod def backward(ctx, a=None,", "where dist[i,j] is the square norm between x[i,:] and y[j,:]", "''' distribute_idx, n = ctx.for_backwards b, c, m = grad_distribute_feature.size()", "* n, 3) # dist = (new_xyz_repeat - xyz_repeat).pow(2).sum(dim=2).view(b, m,", "xyz: (b, n, 3) coordinates 
of the features new_xyz: (b,", "= ballquery(self.radius, 2*self.nsample, xyz, new_xyz) else: # idx = knnquery_naive(self.nsample,", "= grad_distribute_feature.data.contiguous() pointops_cuda.featuregather_backward_cuda(b, n, m, c, grad_distribute_feature_data, distribute_idx, grad_max_feature.data) return", "* n, 3)).pow(2).sum(dim=2).view(b, m, n) [_, idxs] = torch.sort(dist, dim=2)", "= new_xyz.size(1) new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() pointops_cuda.labelstat_ballrange_cuda(b, n, m,", "Tuple[torch.Tensor, torch.Tensor]: \"\"\" Find the three nearest neighbors of unknown", "weight, m) output = torch.cuda.FloatTensor(b, c, n) pointops_cuda.interpolation_forward_cuda(b, c, m,", "(b, m, nclass) ''' assert label_stat.is_contiguous() assert idx.is_contiguous() b, n,", "then use 'y=x'. i.e. dist[i,j] = ||x[i,:]-y[j,:]||^2 ''' x_norm =", "unknown, known, dist2, idx) return torch.sqrt(dist2), idx @staticmethod def backward(ctx,", "new_xyz: torch.Tensor = None) -> torch.Tensor: \"\"\" input: xyz: (b,", "Groups with a ball query of radius parameters: radius: float32,", "= torch.cuda.FloatTensor(b, c, m, nsample) pointops_cuda.grouping_forward_cuda(b, c, n, m, nsample,", "dim=1) # (b, c+3, m, nsample) # le new_features =", "of the interpolated features \"\"\" assert features.is_contiguous() assert idx.is_contiguous() assert", "torch.Tensor, label_stat: torch.Tensor) -> torch.Tensor: ''' :param ctx: :param radius:", "1).view(b, m * n, 3) # dist = (new_xyz_repeat -", "grouping(xyz_trans, idx) # (b, 3, m, nsample) # grouped_idxs =", "super(QueryAndGroup_Dilate, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz def", "xyz, temp, idx) return idx @staticmethod def backward(xyz, a=None): return", "input: features: (b, c, n), idx : (b, m) tensor", "output) return output @staticmethod def backward(ctx, grad_out: torch.Tensor) -> Tuple[torch.Tensor,", "idx, weight, m = ctx.interpolation_for_backward b, c, n = 
grad_out.size()", "\"\"\" assert features.is_contiguous() assert idx.is_contiguous() assert weight.is_contiguous() b, c, m", "three nearest neighbors of unknown in known input: unknown: (b,", "3) and n > m, m: int32 output: idx: (b,", "output: idx: (b, m) \"\"\" assert xyz.is_contiguous() b, n, _", "(b, m, nsample) ( dist2: (b, m, nsample) ) \"\"\"", "-= new_xyz.transpose(1, 2).unsqueeze(-1) if features is not None: grouped_features =", "m = idx.size(1) new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() pointops_cuda.labelstat_idx_cuda(b, n,", "pointops_cuda.knnquery_cuda(b, n, m, nsample, xyz, new_xyz, idx, dist2) return idx", "= True): super(GroupAll, self).__init__() self.use_xyz = use_xyz def forward(self, xyz:", "m) \"\"\" assert max_xyz.is_contiguous() assert xyz.is_contiguous() b, n, _ =", "xyz.size() == new_xyz.size() if new_xyz is None: new_xyz = xyz", "of ball nsample: int32, Maximum number of features to gather", "n, m, nsample, nclass, label_stat, idx, new_label_stat) return new_label_stat @staticmethod", "the three nearest neighbors idx: (b, n, 3) index of", "output: new_features: (b, c+3, m, nsample) # grouped_idxs: (b, m,", "torch.cuda.IntTensor(b, m, nclass).zero_() idx = torch.cuda.IntTensor(b, m, nsample).zero_() pointops_cuda.labelstat_and_ballquery_cuda(b, n,", "= new_xyz.repeat(1, 1, n).view(b, m * n, 3) # xyz_repeat", "backward(ctx, a=None): return None, None, None, None labelstat_ballrange = LabelStatBallRange.apply", "of the features new_xyz: (b, m, 3) centriods features: (b,", "\"\"\" grouped_xyz = xyz.transpose(1, 2).unsqueeze(2) if features is not None:", "grad_out_data, idx, weight, grad_features.data) return grad_features, None, None interpolation =", "n = ctx.for_backwards b, c, m, nsample = grad_out.size() grad_features", "c, n) descriptors of the features output: new_features: (b, c+3,", "None) -> torch.Tensor: \"\"\" input: xyz: (b, n, 3) coordinates", ":param nsample: :param label_stat: (b, n, nclass) :param idx: (b,", "= 
LabelStatBallRange.apply class LabelStatIdx(Function): @staticmethod def forward(ctx, nsample: int, label_stat:", ":param ctx: :param max_feature: (b, c, n) :param distribute_idx: (b,", "y_norm = (y ** 2).sum(1).view(1, -1) else: y_t = torch.transpose(x,", "torch.Tensor, known: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: \"\"\" Find the three", "= ||x[i,:]-y[j,:]||^2 ''' x_norm = (x ** 2).sum(1).view(-1, 1) if", "a=None): return None, None featuredistribute = FeatureDistribute.apply class FeatureGather(Function): @staticmethod", "and n > m, m: int32 output: idx: (b, m)", "''' :param ctx: :param max_feature: (b, c, n) :param distribute_idx:", "backward(ctx, a=None, b=None): return None, None, None, None, None labelstat_and_ballquery", "input: unknown: (b, n, 3), known: (b, m, 3) output:", "int, xyz: torch.Tensor, new_xyz: torch.Tensor, label_stat: torch.Tensor): ''' :param ctx:", "= grouping(features, idx) if self.use_xyz: new_features = torch.cat([grouped_xyz, grouped_features], dim=1)", "idx = knnquery(2*self.nsample, xyz, new_xyz) # (b, m, nsample) idx2", "nsample, use_xyz #def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor = None,", "grouped_xyz return new_features class GroupAll(nn.Module): \"\"\" Groups all features \"\"\"", "return distribute_idx @staticmethod def backward(ctx, a=None): return None, None featuredistribute", "torch.Tensor): ''' :param ctx: :param grad_distribute_feature: (b, c, m) :return:", "= label_stat.size() m = idx.size(1) new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_()", "Radius of ball nsample: int32, Maximum number of features to", "def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup, self).__init__() self.radius, self.nsample, self.use_xyz", "NearestNeighbor.apply class Interpolation(Function): @staticmethod def forward(ctx, features: torch.Tensor, idx: torch.Tensor,", "torch.transpose(x, 0, 1) y_norm = x_norm.view(1, -1) dist = x_norm", "= max_xyz.size() m = xyz.size(1) 
distribute_idx = torch.cuda.IntTensor(b, m).zero_() pointops_cuda.featuredistribute_cuda(b,", "torch.Tensor: ''' :param ctx: :param radius: :param xyz: (b, n,", "c, m) pointops_cuda.gathering_forward_cuda(b, c, n, m, features, idx, output) ctx.for_backwards", "super(Gen_QueryAndGroupXYZ, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz #def", "ctx: :param nsample: :param label_stat: (b, n, nclass) :param idx:", "n, _ = unknown.size() m = known.size(1) dist2 = torch.cuda.FloatTensor(b,", "= ballquery(self.radius, self.nsample, xyz, new_xyz) else: idx = knnquery(self.nsample, xyz,", "# xyz_repeat = xyz.repeat(1, m, 1).view(b, m * n, 3)", "nsample, nclass, new_xyz, xyz, label_stat, idx, new_label_stat) return new_label_stat, idx", "features new_xyz: (b, m, 3) centriods features: (b, c, n)", "m * n, 3) # dist = (new_xyz_repeat - xyz_repeat).pow(2).sum(dim=2).view(b,", "torch.Tensor: \"\"\" input: xyz: (b, n, 3) coordinates of the", "(b, c, n) :param distribute_idx: (b, m) :return: distribute_feature: (b,", "new_xyz.repeat(1, 1, n).view(b, m * n, 3) # xyz_repeat =", "new_features: (b, m, nsample) \"\"\" if new_xyz is None: new_xyz", ":return: distribute_feature: (b, c, m) ''' assert max_feature.is_contiguous() assert distribute_idx.is_contiguous()", "n).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.gathering_backward_cuda(b, c, n, m, grad_out_data, idx,", "idx: (b, n, 3) three nearest neighbors of the target", "(b, m, nsample) #grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1) if features is", "features.is_contiguous() assert idx.is_contiguous() b, c, n = features.size() m =", "b, n, _ = unknown.size() m = known.size(1) dist2 =", "torch.Tensor, new_xyz: torch.Tensor, features: torch.Tensor = None) -> Tuple[torch.Tensor]: \"\"\"", "not None: grouped_features = grouping(features, idx) if self.use_xyz: new_features =", "''' :param ctx: :param grad_distribute_feature: (b, c, m) :return: grad_max_feature:", "the 
features that form the query balls \"\"\" assert xyz.is_contiguous()", "Interpolation.apply class Grouping(Function): @staticmethod def forward(ctx, features: torch.Tensor, idx: torch.Tensor)", "return new_features class QueryAndGroup_Dilate(nn.Module): \"\"\" Groups with a ball query", "n, 3) centriods features: (b, c, n) idx: idx of", "xyz, new_xyz) else: # idx = knnquery_naive(self.nsample, xyz, new_xyz) #", "dim=2) idx = idxs[:, :, 0:nsample].int() # ''' return idx", "(b, m, nsample) grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1) if features is", "n, 3) and n > m, m: int32 output: idx:", "\"\"\" assert xyz.is_contiguous() assert new_xyz.is_contiguous() b, n, _ = xyz.size()", "None, features: torch.Tensor = None, idx: torch.Tensor = None) ->", "the ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(QueryAndGroup_Dilate, self).__init__()", "new_xyz: (b, m, 3) :param label_stat: (b, n, nclass) :return:", "n, 3) xyz coordinates of the features new_xyz: torch.Tensor, (b,", "to group with output: (b, c, m, nsample) \"\"\" assert", "None interpolation = Interpolation.apply class Grouping(Function): @staticmethod def forward(ctx, features:", "grad_out.size() grad_features = torch.cuda.FloatTensor(b, c, n).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.grouping_backward_cuda(b,", "forward(ctx, radius: float, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor, label_stat:", "nsample: int, label_stat: torch.Tensor, idx: torch.Tensor) -> torch.Tensor: ''' :param", "new_xyz: torch.Tensor = None, features: torch.Tensor = None, idx: torch.Tensor", "= BallQuery.apply class FeatureDistribute(Function): @staticmethod def forward(ctx, max_xyz: torch.Tensor, xyz:", "new_xyz = xyz b, m, _ = new_xyz.size() n =", "features is not None: grouped_features = features.unsqueeze(2) if self.use_xyz: new_features", "torch.Tensor) -> torch.Tensor: ''' :param ctx: :param max_feature: (b, c,", "\"\"\" input: features: (b, c, n), idx : (b, 
m,", "None, None, None knnquery_naive = KNNQueryNaive.apply class KNNQuery(Function): @staticmethod def", "0:nsample] ''' # ''' # new_xyz_repeat = new_xyz.repeat(1, 1, n).view(b,", "(b, n, 3) weights output: (b, c, n) tensor of", "(b, m, nsample) xyz_trans = xyz.transpose(1, 2).contiguous() grouped_xyz = grouping(xyz_trans,", "feature!\" new_features = grouped_xyz return grouped_xyz, new_features class Gen_QueryAndGroupXYZ(nn.Module): \"\"\"", "m, nsample) output: (b, c, n), None \"\"\" idx, n", "new_features class Le_QueryAndGroup(nn.Module): \"\"\" Groups with a ball query of", "= FurthestSampling.apply class Gathering(Function): @staticmethod def forward(ctx, features, idx): \"\"\"", "is the square norm between x[i,:] and y[j,:] if y", "assert max_xyz.is_contiguous() assert xyz.is_contiguous() b, n, _ = max_xyz.size() m", "xyz.is_contiguous() assert new_xyz.is_contiguous() assert label_stat.is_contiguous() b, n, nclass = label_stat.size()", "centriods output: idx: (b, m, nsample) ( dist2: (b, m,", "forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor, features: torch.Tensor = None) ->", "KNNQueryNaive.apply class KNNQuery(Function): @staticmethod def forward(ctx, nsample: int, xyz: torch.Tensor,", "3) index of 3 nearest neighbors \"\"\" assert unknown.is_contiguous() assert", "m, nsample) if self.use_xyz: #new_features = torch.cat([grouped_xyz, grouped_features], dim=1) #", "def forward(ctx, max_feature: torch.Tensor, distribute_idx: torch.Tensor) -> torch.Tensor: ''' :param", "\"\"\" if new_xyz is None: new_xyz = xyz if idx", "of the features new_xyz: torch.Tensor, (b, m, 3) centers of", "m, radius, nsample, nclass, new_xyz, xyz, label_stat, idx, new_label_stat) return", "= None) -> torch.Tensor: def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor", "grad_features = torch.cuda.FloatTensor(b, c, m).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.interpolation_backward_cuda(b, c,", "(b, c, n) descriptors of the features output: 
new_features: (b,", "= grouping(idxs.unsqueeze(1).float(), idx).squeeze(1).int() # (b, m, nsample) grouped_xyz -= new_xyz.transpose(1,", "with the indicies of the features that form the query", "forward(ctx, radius: float, xyz: torch.Tensor, new_xyz: torch.Tensor, label_stat: torch.Tensor) ->", "grouped_xyz return grouped_xyz, new_features class QueryAndGroup(nn.Module): \"\"\" Groups with a", "\"Cannot have not features and not use xyz as a", "grouping = Grouping.apply class GroupingInt(Function): @staticmethod def forward(ctx, features: torch.Tensor,", "is None: new_xyz = xyz assert xyz.is_contiguous() assert new_xyz.is_contiguous() b,", "the ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup_OnlyFeature, self).__init__()", "pairwise_distances(new_xyz[i, :, :], xyz[i, :, :]) [_, idxs] = torch.sort(dist,", "nclass, new_xyz, xyz, label_stat, new_label_stat) return new_label_stat @staticmethod def backward(ctx,", "else: new_features = grouped_features else: new_features = grouped_xyz return new_features", "nclass) :return: new_label_stat: (b, m, nclass) ''' assert xyz.is_contiguous() assert", ":return: grad_max_feature: (b, c, n), None ''' distribute_idx, n =", "idx = idxs[:, :, 0:nsample].int() # ''' return idx @staticmethod", "input: nsample: int32, Number of neighbor xyz: (b, n, 3)", "torch.Tensor: \"\"\" input: features: (b, c, n), idx : (b,", "optional Mxd matirx Output: dist is a NxM matrix where", "n, m, c, max_feature, distribute_idx, distribute_feature) ctx.for_backwards = (distribute_idx, n)", "ballquery(self.radius, self.nsample, xyz, new_xyz) else: idx = knnquery(self.nsample, xyz, new_xyz)", "int, xyz: torch.Tensor, new_xyz: torch.Tensor) -> torch.Tensor: \"\"\" input: radius:", "n, m, c, grad_distribute_feature_data, distribute_idx, grad_max_feature.data) return grad_max_feature, None featuregather", "KNN Indexing input: nsample: int32, Number of neighbor xyz: (b,", "3) coordinates of the features new_xyz: (b, 
m, 3) centriods", "query of radius parameters: radius: float32, Radius of ball nsample:", "grouping(features, idx) if self.use_xyz: new_features = torch.cat([grouped_xyz, grouped_features], dim=1) #", "import torch from torch.autograd import Function import torch.nn as nn", "c, n).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.grouping_backward_cuda(b, c, n, m, nsample,", "m, grad_out_data, idx, grad_features.data) return grad_features, None gathering = Gathering.apply", "#xyz_trans = xyz.transpose(1, 2).contiguous() #grouped_xyz = grouping(xyz_trans, idx) # (b,", "label_stat, idx, new_label_stat) return new_label_stat @staticmethod def backward(ctx, a=None): return", "# (b, 3, m, nsample) return grouped_xyz class Le_QueryAndGroup_OnlyFeature(nn.Module): \"\"\"", "grad_max_feature.data) return grad_max_feature, None featuregather = FeatureGather.apply class LabelStatBallRange(Function): @staticmethod", "idx.size() grad_features = torch.cuda.FloatTensor(b, c, n).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.gathering_backward_cuda(b,", "nsample=32, use_xyz=True): super(QueryAndGroup_Dilate, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample,", "m, nsample) idx2 = np.array([i for i in range(2*self.nsample)]) np.random.shuffle(idx2)", "features: torch.Tensor = None) -> Tuple[torch.Tensor]: \"\"\" input: xyz: (b,", "nclass) ''' assert label_stat.is_contiguous() assert idx.is_contiguous() b, n, nclass =", "= torch.sort(dist, dim=2) idx = idxs[:, :, 1:nsample+1].int() # '''", "x_norm = (x ** 2).sum(1).view(-1, 1) if y is not", "m) ''' assert max_feature.is_contiguous() assert distribute_idx.is_contiguous() b, c, n =", "torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: \"\"\" Find the three nearest neighbors", ":return: new_label_stat: (b, m, nclass) ''' assert label_stat.is_contiguous() assert idx.is_contiguous()", "xyz_repeat).pow(2).sum(dim=2).view(b, m, n) dist = (new_xyz.repeat(1, 1, n).view(b, m *", 
"1).view(b, m * n, 3)).pow(2).sum(dim=2).view(b, m, n) [_, idxs] =", "(b, m, nsample) #xyz_trans = xyz.transpose(1, 2).contiguous() #grouped_xyz = grouping(xyz_trans,", "-> torch.Tensor: def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor = None)", "new_xyz: torch.Tensor, label_stat: torch.Tensor) -> torch.Tensor: ''' :param ctx: :param", "xyz_trans = xyz.transpose(1, 2).contiguous() grouped_xyz = grouping(xyz_trans, idx) # (b,", "radius parameters: radius: float32, Radius of ball nsample: int32, Maximum", "nsample) # grouped_idxs: (b, m, nsample) \"\"\" assert xyz.size() ==", "m, c, max_feature, distribute_idx, distribute_feature) ctx.for_backwards = (distribute_idx, n) return", "(b, m, 3) centriods output: idx: (b, m, nsample) \"\"\"", "-> torch.Tensor: \"\"\" input: xyz: (b, n, 3) coordinates of", "for i in range(b): dist = pairwise_distances(new_xyz[i, :, :], xyz[i,", "xyz: torch.Tensor, new_xyz: torch.Tensor, features: torch.Tensor = None) -> Tuple[torch.Tensor]:", "nsample) idx = knnquery(self.nsample, xyz, new_xyz) # (b, m, nsample)", "return torch.sqrt(dist2), idx @staticmethod def backward(ctx, a=None, b=None): return None,", "None, None featuredistribute = FeatureDistribute.apply class FeatureGather(Function): @staticmethod def forward(ctx,", "if features is not None: grouped_features = features.unsqueeze(2) if self.use_xyz:", "pointops_cuda import numpy as np class FurthestSampling(Function): @staticmethod def forward(ctx,", "assert xyz.size() == new_xyz.size() if new_xyz is None: new_xyz =", "_ = xyz.size() idx = torch.cuda.IntTensor(b, m) temp = torch.cuda.FloatTensor(b,", "idx) return torch.sqrt(dist2), idx @staticmethod def backward(ctx, a=None, b=None): return", "torch.Tensor: ''' :param ctx: :param nsample: :param label_stat: (b, n,", "np return torch.clamp(dist, 0.0, np.inf) class KNNQueryNaive(Function): @staticmethod def forward(ctx,", "grad_out_data = grad_out.data.contiguous() pointops_cuda.interpolation_backward_cuda(b, c, n, m, 
grad_out_data, idx, weight,", "def forward(ctx, xyz, m): \"\"\" input: xyz: (b, n, 3)", "radius, nsample, use_xyz def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor =", "xyz, new_xyz) # (b, m, nsample) idx2 = np.array([i for", "features new_xyz: (b, m, 3) centriods output: idx: (b, m,", ":, :]) [_, idxs] = torch.sort(dist, dim=1) idx[i, :, :]", "neighbors # idxs: (b, n) output: new_features: (b, c+3, m,", "grad_features.data) return grad_features, None grouping = Grouping.apply class GroupingInt(Function): @staticmethod", "radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup_SameSize, self).__init__() self.radius, self.nsample, self.use_xyz = radius,", "\"\"\" Performs weight linear interpolation on 3 features input: features:", "@staticmethod def backward(ctx, a=None, b=None): return None, None nearestneighbor =", "not None: grouped_features = features.unsqueeze(2) if self.use_xyz: new_features = torch.cat([grouped_xyz,", "(b, m, 3) centriods output: new_features: (b, m, nsample) \"\"\"", "not None: idx = ballquery(self.radius, self.nsample, xyz, new_xyz) else: #", "None, None interpolation = Interpolation.apply class Grouping(Function): @staticmethod def forward(ctx,", "3) :param new_xyz: (b, m, 3) :param label_stat: (b, n,", "max_feature.is_contiguous() assert distribute_idx.is_contiguous() b, c, n = max_feature.size() m =", "QueryAndGroup(nn.Module): \"\"\" Groups with a ball query of radius parameters:", "3 features input: features: (b, c, m) features descriptors to", "torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: \"\"\" input: grad_out: (b, c, m,", "for i in range(2*self.nsample)]) np.random.shuffle(idx2) idx2 = idx2[:self.nsample] idx =", "def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup_SameSize, self).__init__() self.radius, self.nsample, self.use_xyz", "torch.cuda.LongTensor(b, c, m, nsample) pointops_cuda.grouping_int_forward_cuda(b, c, n, m, nsample, features,", "output @staticmethod def 
backward(ctx, grad_out): idx, c, n = ctx.for_backwards", "of the features new_xyz: (b, m, 3) centriods output: new_features:", "not None: idx = ballquery(self.radius, 2*self.nsample, xyz, new_xyz) else: #", "m, nsample) # le new_features = grouped_features # (b, c,", "torch.Tensor, features: torch.Tensor = None) -> Tuple[torch.Tensor]: \"\"\" input: xyz:", "#new_features = torch.cat([grouped_xyz, grouped_features], dim=1) # (b, c+3, m, nsample)", "of unknown in known input: unknown: (b, n, 3), known:", "distance to the three nearest neighbors idx: (b, n, 3)", "m, grad_out_data, idx, weight, grad_features.data) return grad_features, None, None interpolation", "@staticmethod def forward(ctx, features: torch.Tensor, idx: torch.Tensor, weight: torch.Tensor) ->", "knnquery(self.nsample, xyz, new_xyz) # (b, m, nsample) xyz_trans = xyz.transpose(1,", "n, m, nsample, xyz, new_xyz, idx, dist2) return idx @staticmethod", "self.nsample, self.use_xyz = radius, nsample, use_xyz #def forward(self, xyz: torch.Tensor,", "assert xyz.is_contiguous() assert new_xyz.is_contiguous() b, n, _ = xyz.size() m", "idxs[:, :, 0:nsample].int() # ''' return idx @staticmethod def backward(ctx):", "idx).squeeze(1).int() # (b, m, nsample) grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1) if", "def backward(ctx, grad_out: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: \"\"\" input:", "3) centers of the ball query output: (b, m, nsample)", "output: (b, m, nsample) tensor with the indicies of the", "(b, c, n) output: grad_features: (b, c, m), None, None", "m, nsample) grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1) if features is not", "torch.Tensor, new_xyz: torch.Tensor = None, features: torch.Tensor = None, idx:", "float, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor) -> torch.Tensor: \"\"\"", "torch.Tensor, new_xyz: torch.Tensor) -> torch.Tensor: \"\"\" input: radius: float, radius", "b, n, nclass = label_stat.size() m = idx.size(1) new_label_stat =", "(b, m, 
3) centriods output: idx: (b, m, nsample) (", "the query balls \"\"\" assert xyz.is_contiguous() assert new_xyz.is_contiguous() b, n,", "new_xyz) # (b, m, nsample) xyz_trans = xyz.transpose(1, 2).contiguous() grouped_xyz", "(b, c, m) ''' assert max_feature.is_contiguous() assert distribute_idx.is_contiguous() b, c,", "label_stat, idx, new_label_stat) return new_label_stat, idx @staticmethod def backward(ctx, a=None,", "c, m, nsample) if self.use_xyz: #new_features = torch.cat([grouped_xyz, grouped_features], dim=1)", "distribute_feature @staticmethod def backward(ctx, grad_distribute_feature: torch.Tensor): ''' :param ctx: :param", "@staticmethod def backward(ctx, grad_out: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: \"\"\" input:", "grouping_int = GroupingInt.apply class BallQuery(Function): @staticmethod def forward(ctx, radius: float,", "grouped_xyz = xyz.transpose(1, 2).unsqueeze(2) if features is not None: grouped_features", "= x_norm.view(1, -1) dist = x_norm + y_norm - 2.0", "= None) -> torch.Tensor: \"\"\" input: xyz: (b, n, 3)", "None, idx: torch.Tensor = None) -> torch.Tensor: \"\"\" input: xyz:", "range(2*self.nsample)]) np.random.shuffle(idx2) idx2 = idx2[:self.nsample] idx = idx[:, :, idx2]", "m).zero_() pointops_cuda.featuredistribute_cuda(b, n, m, max_xyz, xyz, distribute_idx) return distribute_idx @staticmethod", "n, 3) idx = torch.cuda.IntTensor(b, n, 3) pointops_cuda.nearestneighbor_cuda(b, n, m,", "x_norm.view(1, -1) dist = x_norm + y_norm - 2.0 *", "return None, None featuredistribute = FeatureDistribute.apply class FeatureGather(Function): @staticmethod def", "BallQuery(Function): @staticmethod def forward(ctx, radius: float, nsample: int, xyz: torch.Tensor,", "int, maximum number of features in the balls xyz: torch.Tensor,", "\"\"\" Groups with a ball query of radius parameters: radius:", "group with output: (b, c, m, nsample) \"\"\" assert features.is_contiguous()", "of the features new_xyz: (b, m, 3) centriods output: idx:", "''' 
Input: x is a Nxd matrix y is an", "2).contiguous() grouped_xyz = grouping(xyz_trans, idx) # (b, 3, m, nsample)", "new_xyz) # (b, m, nsample) idx = knnquery(2*self.nsample, xyz, new_xyz)", "m) :return: distribute_feature: (b, c, m) ''' assert max_feature.is_contiguous() assert", "\"\"\" input: xyz: (b, n, 3) coordinates of the features", "torch.Tensor, distribute_idx: torch.Tensor) -> torch.Tensor: ''' :param ctx: :param max_feature:", "super(Le_QueryAndGroup, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz def", "Tuple[torch.Tensor]: \"\"\" input: xyz: (b, n, 3) coordinates of the", "from torch.autograd import Function import torch.nn as nn from metrics.pointops", ":param xyz: (b, n, 3) :param new_xyz: (b, m, 3)", "return torch.clamp(dist, 0.0, np.inf) class KNNQueryNaive(Function): @staticmethod def forward(ctx, nsample:", "grouped_xyz return grouped_xyz, new_features class Gen_QueryAndGroupXYZ(nn.Module): \"\"\" Groups with a", "nsample) pointops_cuda.grouping_int_forward_cuda(b, c, n, m, nsample, features, idx, output) return", "input: xyz: (b, n, 3) and n > m, m:", "pointops_cuda.labelstat_idx_cuda(b, n, m, nsample, nclass, label_stat, idx, new_label_stat) return new_label_stat", "# new_xyz_repeat = new_xyz.repeat(1, 1, n).view(b, m * n, 3)", "m, nsample).zero_() pointops_cuda.knnquery_cuda(b, n, m, nsample, xyz, new_xyz, idx, dist2)", "ctx.for_backwards = (idx, c, n) return output @staticmethod def backward(ctx,", "= xyz b, m, _ = new_xyz.size() n = xyz.size(1)", "torch.autograd import Function import torch.nn as nn from metrics.pointops import", "backward(ctx, grad_distribute_feature: torch.Tensor): ''' :param ctx: :param grad_distribute_feature: (b, c,", "-> Tuple[torch.Tensor]: \"\"\" input: xyz: (b, n, 3) coordinates of", "m, 3) centriods output: new_features: (b, m, nsample) \"\"\" if", "m, nsample) ''' assert xyz.is_contiguous() assert new_xyz.is_contiguous() assert label_stat.is_contiguous() b,", "= (idx, n) return 
output @staticmethod def backward(ctx, grad_out: torch.Tensor)", "c, m, nsample) pointops_cuda.grouping_forward_cuda(b, c, n, m, nsample, features, idx,", "idx @staticmethod def backward(ctx, a=None, b=None): return None, None nearestneighbor", "@staticmethod def backward(ctx, a=None): return None, None, None, None labelstat_ballrange", "new_xyz: torch.Tensor, label_stat: torch.Tensor): ''' :param ctx: :param radius: :param", "(b, c, m) \"\"\" assert features.is_contiguous() assert idx.is_contiguous() b, c,", "y_t = torch.transpose(y, 0, 1) y_norm = (y ** 2).sum(1).view(1,", "def backward(ctx, a=None): return None, None, None, None ballquery =", "_ = xyz.size() m = new_xyz.size(1) idx = torch.cuda.IntTensor(b, m,", "def backward(ctx, grad_out: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: \"\"\" input: grad_out:", "None: y_t = torch.transpose(y, 0, 1) y_norm = (y **", "a=None): return None, None, None knnquery = KNNQuery.apply class KNNQueryExclude(Function):", "tensor of the interpolated features \"\"\" assert features.is_contiguous() assert idx.is_contiguous()", "torch from torch.autograd import Function import torch.nn as nn from", "(b, c, m, nsample) output: (b, c, n), None \"\"\"", "torch.Tensor = None, features: torch.Tensor = None, idx: torch.Tensor =", "None, idx: torch.Tensor = None) -> torch.Tensor: def forward(self, xyz:", "torch.Tensor) -> torch.Tensor: \"\"\" :param ctx: :param max_xyz: (b, n,", "b, c, m, nsample = grad_out.size() grad_features = torch.cuda.FloatTensor(b, c,", "= grad_distribute_feature.size() grad_max_feature = torch.cuda.FloatTensor(b, c, n).zero_() grad_distribute_feature_data = grad_distribute_feature.data.contiguous()", "= grad_out.data.contiguous() pointops_cuda.interpolation_backward_cuda(b, c, n, m, grad_out_data, idx, weight, grad_features.data)", "nsample, features, idx, output) ctx.for_backwards = (idx, n) return output", "= torch.cuda.FloatTensor(b, m, nsample).zero_() pointops_cuda.knnquery_cuda(b, n, m, nsample, 
xyz, new_xyz,", "weight: (b, n, 3) weights output: (b, c, n) tensor", "None knnquery_exclude = KNNQueryExclude.apply class Le_QueryAndGroup_SameSize(nn.Module): \"\"\" Groups with a", "Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: \"\"\" input: grad_out: (b, c, n) output:", "unknown in known input: unknown: (b, n, 3), known: (b,", ":param ctx: :param radius: :param nsample: :param xyz: (b, n,", "c, n = max_feature.size() m = distribute_idx.size(1) distribute_feature = torch.cuda.FloatTensor(b,", "torch.cat([grouped_xyz, grouped_features], dim=1) # (b, c+3, m, nsample) else: new_features", "assert xyz.is_contiguous() b, n, _ = xyz.size() idx = torch.cuda.IntTensor(b,", "1) y_norm = x_norm.view(1, -1) dist = x_norm + y_norm", "as a feature!\" new_features = grouped_xyz return new_features class GroupAll(nn.Module):", "c+3, m, nsample) # le new_features = grouped_features # (b,", "torch.cuda.FloatTensor(b, c, m).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.interpolation_backward_cuda(b, c, n, m,", "input: radius: float, radius of the balls nsample: int, maximum", "idx[:, :, idx2] xyz_trans = xyz.transpose(1, 2).contiguous() grouped_xyz = grouping(xyz_trans,", "three nearest neighbors of the target features in features weight:", "(b, m, nsample) :return: new_label_stat: (b, m, nclass) ''' assert", "neighbors idx: (b, n, 3) index of 3 nearest neighbors", "pointops_cuda.interpolation_forward_cuda(b, c, m, n, features, idx, weight, output) return output", "n), None \"\"\" idx, n = ctx.for_backwards b, c, m,", "return idx @staticmethod def backward(ctx): return None, None, None knnquery_exclude", "radius, nsample, nclass, new_xyz, xyz, label_stat, idx, new_label_stat) return new_label_stat,", "None, None furthestsampling = FurthestSampling.apply class Gathering(Function): @staticmethod def forward(ctx,", "@staticmethod def backward(ctx, a=None): return None, None grouping_int = GroupingInt.apply", "c, n), idx : (b, m, nsample) containing the 
indicies", "idx.is_contiguous() assert weight.is_contiguous() b, c, m = features.size() n =", "= label_stat.size() m = new_xyz.size(1) new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_()", "class Le_QueryAndGroup(nn.Module): \"\"\" Groups with a ball query of radius", "1, N) tensor \"\"\" grouped_xyz = xyz.transpose(1, 2).unsqueeze(2) if features", "torch.cuda.IntTensor(b, m).zero_() pointops_cuda.featuredistribute_cuda(b, n, m, max_xyz, xyz, distribute_idx) return distribute_idx", "(b, n, nclass) :return: new_label_stat: (b, m, nclass) ''' assert", "> m, m: int32 output: idx: (b, m) \"\"\" assert", ":param nsample: :param xyz: (b, n, 3) :param new_xyz: (b,", "Tuple[torch.Tensor]: \"\"\" KNN Indexing input: nsample: int32, Number of neighbor", "torch.Tensor) -> torch.Tensor: ''' :param ctx: :param nsample: :param label_stat:", "features: torch.Tensor, idx: torch.Tensor) -> torch.Tensor: \"\"\" input: features: (b,", "idx, new_label_stat) return new_label_stat, idx @staticmethod def backward(ctx, a=None, b=None):", "nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor = None) -> Tuple[torch.Tensor]:", "m, 3) :param label_stat: (b, n, nclass) :return: new_label_stat: (b,", "(b, c, n) idx: idx of neighbors # idxs: (b,", "Gathering.apply class NearestNeighbor(Function): @staticmethod def forward(ctx, unknown: torch.Tensor, known: torch.Tensor)", "c, m, nsample = grad_out.size() grad_features = torch.cuda.FloatTensor(b, c, n).zero_()", "knnquery_exclude = KNNQueryExclude.apply class Le_QueryAndGroup_SameSize(nn.Module): \"\"\" Groups with a ball", "of neighbors # idxs: (b, n) output: new_features: (b, c+3,", "of 3 nearest neighbors \"\"\" assert unknown.is_contiguous() assert known.is_contiguous() b,", "n) dist = (new_xyz.repeat(1, 1, n).view(b, m * n, 3)", "nsample).zero_() pointops_cuda.knnquery_cuda(b, n, m, nsample, xyz, new_xyz, idx, dist2) return", "nsample) # grouped_idxs: (b, m, nsample) \"\"\" if new_xyz is", "# (b, c+3, m, nsample) else: new_features 
= grouped_features else:", "label_stat: (b, n, nclass) :return: new_label_stat: (b, m, nclass) '''", "idx = torch.cuda.IntTensor(b, n, 3) pointops_cuda.nearestneighbor_cuda(b, n, m, unknown, known,", "''' assert label_stat.is_contiguous() assert idx.is_contiguous() b, n, nclass = label_stat.size()", "# ''' return idx @staticmethod def backward(ctx): return None, None,", "\"\"\" if new_xyz is None: new_xyz = xyz #if idx", "= xyz.transpose(1, 2).contiguous() #grouped_xyz = grouping(xyz_trans, idx) # (b, 3,", "as nn from metrics.pointops import pointops_cuda import numpy as np", "= knnquery_naive(self.nsample, xyz, new_xyz) # (b, m, nsample) idx =", "= LabelStatIdx.apply class LabelStatAndBallQuery(Function): @staticmethod def forward(ctx, radius: float, nsample:", "idx @staticmethod def backward(ctx, a=None): return None, None, None knnquery", "indicies of features to group with output: (b, c, m,", "new_label_stat, idx @staticmethod def backward(ctx, a=None, b=None): return None, None,", "grouped_xyz -= new_xyz.transpose(1, 2).unsqueeze(-1) if features is not None: grouped_features", "grouped_idxs: (b, m, nsample) \"\"\" if new_xyz is None: new_xyz", "torch.cat([grouped_xyz, grouped_features], dim=1) # (b, c+3, 1, n) else: new_features", "new_xyz: (b, n, 3) centriods features: (b, c, n) idx:", "return new_label_stat @staticmethod def backward(ctx, a=None): return None, None, None", "def backward(ctx, grad_distribute_feature: torch.Tensor): ''' :param ctx: :param grad_distribute_feature: (b,", "None) -> torch.Tensor: def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor =", "3) coordinates of the features new_xyz: ignored torch features: (b,", "new_xyz, idx, dist2) return idx @staticmethod def backward(ctx, a=None): return", "be interpolated from idx: (b, n, 3) three nearest neighbors", "features.unsqueeze(2) if self.use_xyz: new_features = torch.cat([grouped_xyz, grouped_features], dim=1) # (b,", "n, m, max_xyz, xyz, distribute_idx) return distribute_idx 
@staticmethod def backward(ctx,", "idx) return idx @staticmethod def backward(xyz, a=None): return None, None", "LabelStatBallRange(Function): @staticmethod def forward(ctx, radius: float, xyz: torch.Tensor, new_xyz: torch.Tensor,", "(b, n, 3) three nearest neighbors of the target features", "= pairwise_distances(new_xyz[i, :, :], xyz[i, :, :]) [_, idxs] =", "idx: (b, m, nsample) ( dist2: (b, m, nsample) )", "grouped_features else: assert self.use_xyz, \"Cannot have not features and not", "features: (b, c, m) features descriptors to be interpolated from", "new_xyz) else: idx = knnquery(self.nsample, xyz, new_xyz) # (b, m,", "__init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup_SameSize, self).__init__() self.radius, self.nsample, self.use_xyz =", "idx: (b, m) \"\"\" assert xyz.is_contiguous() b, n, _ =", "= torch.cuda.FloatTensor(b, c, n).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.grouping_backward_cuda(b, c, n,", "idx: (b, m, nsample) \"\"\" if new_xyz is None: new_xyz", "y_t) import numpy as np return torch.clamp(dist, 0.0, np.inf) class", "torch.cuda.IntTensor(b, m, nclass).zero_() pointops_cuda.labelstat_idx_cuda(b, n, m, nsample, nclass, label_stat, idx,", "== new_xyz.size() if new_xyz is None: new_xyz = xyz if", "grad_features, None, None interpolation = Interpolation.apply class Grouping(Function): @staticmethod def", "new_xyz.size() n = xyz.size(1) ''' idx = torch.zeros(b, m, nsample).int().cuda()", "new_xyz is None: new_xyz = xyz #if idx is None:", "torch.cuda.IntTensor(b, m, nsample).zero_() pointops_cuda.labelstat_and_ballquery_cuda(b, n, m, radius, nsample, nclass, new_xyz,", "None) -> Tuple[torch.Tensor]: \"\"\" input: xyz: (b, n, 3) coordinates", "n, _ = xyz.size() idx = torch.cuda.IntTensor(b, m) temp =", "a feature!\" new_features = grouped_xyz return new_features class Le_QueryAndGroup(nn.Module): \"\"\"", "m, n) [_, idxs] = torch.sort(dist, dim=2) idx = idxs[:,", "use_xyz def forward(self, xyz: 
torch.Tensor, new_xyz: torch.Tensor, features: torch.Tensor =", "Find the three nearest neighbors of unknown in known input:", "super(GroupAll, self).__init__() self.use_xyz = use_xyz def forward(self, xyz: torch.Tensor, new_xyz:", "** 2).sum(1).view(1, -1) else: y_t = torch.transpose(x, 0, 1) y_norm", "xyz.transpose(1, 2).contiguous() #grouped_xyz = grouping(xyz_trans, idx) # (b, 3, m,", "None \"\"\" idx, n = ctx.for_backwards b, c, m, nsample", "input: features: (b, c, n), idx : (b, m, nsample)", "(b, 3, m, nsample) return grouped_xyz class Le_QueryAndGroup_OnlyFeature(nn.Module): \"\"\" Groups", "pointops_cuda.gathering_backward_cuda(b, c, n, m, grad_out_data, idx, grad_features.data) return grad_features, None", "features: (b, c, n) idx: idx of neighbors # idxs:", "1, n) else: new_features = grouped_features else: new_features = grouped_xyz", "backward(ctx, a=None): return None, None, None labelstat_idx = LabelStatIdx.apply class", "nsample) \"\"\" assert xyz.size() == new_xyz.size() if new_xyz is None:", "m, nsample = grad_out.size() grad_features = torch.cuda.FloatTensor(b, c, n).zero_() grad_out_data", "c, n, m, grad_out_data, idx, weight, grad_features.data) return grad_features, None,", "= torch.cat([grouped_xyz, grouped_features], dim=1) # (b, c+3, m, nsample) else:", "idx, new_label_stat) return new_label_stat @staticmethod def backward(ctx, a=None): return None,", "3)).pow(2).sum(dim=2).view(b, m, n) [_, idxs] = torch.sort(dist, dim=2) idx =", "= grouping(idxs.unsqueeze(1).float(), idx).squeeze(1).int() # (b, m, nsample) #grouped_xyz -= new_xyz.transpose(1,", "= x_norm + y_norm - 2.0 * torch.mm(x, y_t) import", "= None, idx: torch.Tensor = None) -> torch.Tensor: def forward(self,", ":param radius: :param xyz: (b, n, 3) :param new_xyz: (b,", "x[i,:] and y[j,:] if y is not given then use", "grouped_xyz class Le_QueryAndGroup_OnlyFeature(nn.Module): \"\"\" Groups with a ball query of", "assert idx.is_contiguous() b, c, n = features.size() _, m, nsample", 
"LabelStatIdx.apply class LabelStatAndBallQuery(Function): @staticmethod def forward(ctx, radius: float, nsample: int,", "pointops_cuda.ballquery_cuda(b, n, m, radius, nsample, new_xyz, xyz, idx) return idx", "assert new_xyz.is_contiguous() assert label_stat.is_contiguous() b, n, nclass = label_stat.size() m", "nsample=32, use_xyz=True): super(Gen_QueryAndGroupXYZ, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample,", "c, m) :return: grad_max_feature: (b, c, n), None ''' distribute_idx,", "ctx.for_backwards b, c, m = grad_distribute_feature.size() grad_max_feature = torch.cuda.FloatTensor(b, c,", "xyz as a feature!\" new_features = grouped_xyz return grouped_xyz, new_features", "None: grouped_features = features.unsqueeze(2) if self.use_xyz: new_features = torch.cat([grouped_xyz, grouped_features],", "return None, None, None knnquery_exclude = KNNQueryExclude.apply class Le_QueryAndGroup_SameSize(nn.Module): \"\"\"", "(b, m, nsample) \"\"\" if new_xyz is None: new_xyz =", "backward(ctx): return None, None, None knnquery_naive = KNNQueryNaive.apply class KNNQuery(Function):", "is a NxM matrix where dist[i,j] is the square norm", "to the three nearest neighbors idx: (b, n, 3) index", "output: new_features: (b, c+3, 1, N) tensor \"\"\" grouped_xyz =", "xyz.is_contiguous() b, n, _ = xyz.size() idx = torch.cuda.IntTensor(b, m)", "def __init__(self, radius=None, nsample=32, use_xyz=True): super(QueryAndGroup_Dilate, self).__init__() self.radius, self.nsample, self.use_xyz", "torch.cuda.FloatTensor(b, c, n) pointops_cuda.interpolation_forward_cuda(b, c, m, n, features, idx, weight,", "idx2 = np.array([i for i in range(2*self.nsample)]) np.random.shuffle(idx2) idx2 =", "feature!\" new_features = grouped_xyz return new_features class GroupAll(nn.Module): \"\"\" Groups", "Tuple import torch from torch.autograd import Function import torch.nn as", "grad_max_feature, None featuregather = FeatureGather.apply class LabelStatBallRange(Function): 
@staticmethod def forward(ctx,", "\"\"\" input: grad_out: (b, c, n) output: grad_features: (b, c,", "in the ball \"\"\" def __init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup_OnlyFeature,", "features: (b, c, n), idx : (b, m) tensor output:", "as a feature!\" new_features = grouped_xyz return new_features class QueryAndGroup_Dilate(nn.Module):", "n = ctx.for_backwards b, c, m = grad_distribute_feature.size() grad_max_feature =", "__init__(self, radius=None, nsample=32, use_xyz=True): super(Gen_QueryAndGroupXYZ, self).__init__() self.radius, self.nsample, self.use_xyz =", "3) :return: distribute_idx: (b, m) \"\"\" assert max_xyz.is_contiguous() assert xyz.is_contiguous()", "c, n = ctx.for_backwards b, m = idx.size() grad_features =", "use_xyz #def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor = None, features:", "torch.cuda.FloatTensor(b, n, 3) idx = torch.cuda.IntTensor(b, n, 3) pointops_cuda.nearestneighbor_cuda(b, n,", "m, nsample = idx.size() output = torch.cuda.FloatTensor(b, c, m, nsample)", "(b, n, 3) l2 distance to the three nearest neighbors", "m, 3) centers of the ball query output: (b, m,", "c, m), None, None \"\"\" idx, weight, m = ctx.interpolation_for_backward", "of the balls nsample: int, maximum number of features in", ":] = idxs[:, 0:nsample] ''' # ''' # new_xyz_repeat =", "ctx.for_backwards b, c, m, nsample = grad_out.size() grad_features = torch.cuda.FloatTensor(b,", "use_xyz=True): super(QueryAndGroup_Dilate, self).__init__() self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz", "new_xyz) # (b, m, nsample) xyz_trans = xyz.transpose(1, 2).contiguous() #", "grouped_xyz = grouping(xyz_trans, idx) # (b, 3, m, nsample) #", "xyz.repeat(1, m, 1).view(b, m * n, 3)).pow(2).sum(dim=2).view(b, m, n) [_,", "n) output: grad_features: (b, c, m), None, None \"\"\" idx,", "an optional Mxd matirx Output: dist is a NxM matrix", "c, m, nsample) output: (b, c, n), None \"\"\" idx,", "''' # ''' # new_xyz_repeat = 
new_xyz.repeat(1, 1, n).view(b, m", "distribute_idx.size(1) distribute_feature = torch.cuda.FloatTensor(b, c, m).zero_() pointops_cuda.featuregather_forward_cuda(b, n, m, c,", "m, nsample) # grouped_idxs: (b, m, nsample) \"\"\" assert xyz.size()", "the indicies of features to group with output: (b, c,", "output) ctx.for_backwards = (idx, c, n) return output @staticmethod def", "nsample) output: (b, c, n), None \"\"\" idx, n =", "@staticmethod def forward(ctx, unknown: torch.Tensor, known: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:", "__init__(self, radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup, self).__init__() self.radius, self.nsample, self.use_xyz =", "class Gen_QueryAndGroupXYZ(nn.Module): \"\"\" Groups with a ball query of radius", "pointops_cuda.featuregather_backward_cuda(b, n, m, c, grad_distribute_feature_data, distribute_idx, grad_max_feature.data) return grad_max_feature, None", "m, nclass).zero_() pointops_cuda.labelstat_idx_cuda(b, n, m, nsample, nclass, label_stat, idx, new_label_stat)", "Le_QueryAndGroup_OnlyFeature(nn.Module): \"\"\" Groups with a ball query of radius parameters:", "use 'y=x'. i.e. 
dist[i,j] = ||x[i,:]-y[j,:]||^2 ''' x_norm = (x", "new_label_stat) return new_label_stat, idx @staticmethod def backward(ctx, a=None, b=None): return", "(b, n, 3), known: (b, m, 3) output: dist2: (b,", "in known input: unknown: (b, n, 3), known: (b, m,", "torch.Tensor, (b, m, 3) centers of the ball query output:", "= xyz.transpose(1, 2).contiguous() grouped_xyz = grouping(xyz_trans, idx) # (b, 3,", "features output: new_features: (b, c+3, 1, N) tensor \"\"\" grouped_xyz", "xyz, new_xyz, idx, dist2) return idx @staticmethod def backward(ctx, a=None):", "new_xyz.size() n = xyz.size(1) idx = torch.cuda.IntTensor(b, m, nsample).zero_() dist2", "not None: y_t = torch.transpose(y, 0, 1) y_norm = (y", "distribute_idx: torch.Tensor) -> torch.Tensor: ''' :param ctx: :param max_feature: (b,", "= knnquery(self.nsample, xyz, new_xyz) # (b, m, nsample) xyz_trans =", "(b, m, nsample) ''' assert xyz.is_contiguous() assert new_xyz.is_contiguous() assert label_stat.is_contiguous()", "new_label_stat: (b, m, nclass) ''' assert label_stat.is_contiguous() assert idx.is_contiguous() b,", "LabelStatIdx(Function): @staticmethod def forward(ctx, nsample: int, label_stat: torch.Tensor, idx: torch.Tensor)", "n) else: new_features = grouped_features else: new_features = grouped_xyz return", "n, m, radius, nsample, new_xyz, xyz, idx) return idx @staticmethod", ":return: new_label_stat: (b, m, nclass) ''' assert xyz.is_contiguous() assert new_xyz.is_contiguous()", "b, c, n = features.size() m = idx.size(1) output =", "class QueryAndGroup(nn.Module): \"\"\" Groups with a ball query of radius", "= radius, nsample, use_xyz #def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor", "dist[i,j] is the square norm between x[i,:] and y[j,:] if", "features new_xyz: torch.Tensor, (b, m, 3) centers of the ball", "3) # dist = (new_xyz_repeat - xyz_repeat).pow(2).sum(dim=2).view(b, m, n) dist", "c+3, m, nsample) # grouped_idxs: (b, m, nsample) \"\"\" if", "m, nsample) ( dist2: (b, m, nsample) ) \"\"\" 
if", "torch.cuda.FloatTensor(b, c, n).zero_() grad_out_data = grad_out.data.contiguous() pointops_cuda.gathering_backward_cuda(b, c, n, m,", "a feature!\" new_features = grouped_xyz return grouped_xyz, new_features class QueryAndGroup(nn.Module):", "= (y ** 2).sum(1).view(1, -1) else: y_t = torch.transpose(x, 0,", "x is a Nxd matrix y is an optional Mxd", "\"\"\" input: features: (b, c, n), idx : (b, m)", "''' return idx @staticmethod def backward(ctx): return None, None, None", "= new_xyz.size(1) idx = torch.cuda.IntTensor(b, m, nsample).zero_() pointops_cuda.ballquery_cuda(b, n, m,", "new_features = grouped_xyz return grouped_xyz, new_features class Gen_QueryAndGroupXYZ(nn.Module): \"\"\" Groups", "new_features class QueryAndGroup(nn.Module): \"\"\" Groups with a ball query of", "idx): \"\"\" input: features: (b, c, n), idx : (b,", "= idx.size() grad_features = torch.cuda.FloatTensor(b, c, n).zero_() grad_out_data = grad_out.data.contiguous()", "-> torch.Tensor: ''' :param ctx: :param radius: :param xyz: (b,", "new_xyz, xyz, label_stat, new_label_stat) return new_label_stat @staticmethod def backward(ctx, a=None):", "= grad_out.data.contiguous() pointops_cuda.gathering_backward_cuda(b, c, n, m, grad_out_data, idx, grad_features.data) return", "return None, None, None knnquery_naive = KNNQueryNaive.apply class KNNQuery(Function): @staticmethod", "new_features = torch.cat([grouped_xyz, grouped_features], dim=1) # (b, c+3, m, nsample)", "m, nsample) # grouped_idxs: (b, m, nsample) \"\"\" if new_xyz", "in range(2*self.nsample)]) np.random.shuffle(idx2) idx2 = idx2[:self.nsample] idx = idx[:, :,", "label_stat.size() m = new_xyz.size(1) new_label_stat = torch.cuda.IntTensor(b, m, nclass).zero_() pointops_cuda.labelstat_ballrange_cuda(b,", "idxs] = torch.sort(dist, dim=2) idx = idxs[:, :, 0:nsample].int() #", "the features output: new_features: (b, c+3, 1, N) tensor \"\"\"", "else: y_t = torch.transpose(x, 0, 1) y_norm = x_norm.view(1, -1)", "class 
QueryAndGroup_Dilate(nn.Module): \"\"\" Groups with a ball query of radius", "idx, c, n = ctx.for_backwards b, m = idx.size() grad_features", "3) weights output: (b, c, n) tensor of the interpolated", "radius=None, nsample=32, use_xyz=True): super(Le_QueryAndGroup, self).__init__() self.radius, self.nsample, self.use_xyz = radius,", "self.radius is not None: idx = ballquery(self.radius, 2*self.nsample, xyz, new_xyz)", "torch.Tensor]: \"\"\" Find the three nearest neighbors of unknown in", "self.radius, self.nsample, self.use_xyz = radius, nsample, use_xyz #def forward(self, xyz:", "of the features new_xyz: (b, n, 3) centriods features: (b,", "def forward(ctx, nsample: int, label_stat: torch.Tensor, idx: torch.Tensor) -> torch.Tensor:", "output: (b, c, n), None \"\"\" idx, n = ctx.for_backwards", "class LabelStatAndBallQuery(Function): @staticmethod def forward(ctx, radius: float, nsample: int, xyz:", "torch.Tensor, new_xyz: torch.Tensor = None) -> Tuple[torch.Tensor]: \"\"\" KNN Indexing", "pointops_cuda.labelstat_ballrange_cuda(b, n, m, radius, nclass, new_xyz, xyz, label_stat, new_label_stat) return", "None \"\"\" idx, weight, m = ctx.interpolation_for_backward b, c, n", "= grouping(xyz_trans, idx) # (b, 3, m, nsample) # grouped_idxs", "= torch.cuda.FloatTensor(b, c, n) pointops_cuda.interpolation_forward_cuda(b, c, m, n, features, idx,", "= Grouping.apply class GroupingInt(Function): @staticmethod def forward(ctx, features: torch.Tensor, idx:", "forward(ctx, nsample: int, xyz: torch.Tensor, new_xyz: torch.Tensor = None) ->", "# grouped_idxs: (b, m, nsample) \"\"\" assert xyz.size() == new_xyz.size()", "idx) # (b, 3, m, nsample) return grouped_xyz class Le_QueryAndGroup_OnlyFeature(nn.Module):", "m) \"\"\" assert features.is_contiguous() assert idx.is_contiguous() b, c, n =", "n, 3)).pow(2).sum(dim=2).view(b, m, n) [_, idxs] = torch.sort(dist, dim=2) idx", "''' :param ctx: :param radius: :param xyz: (b, n, 3)", "new_label_stat @staticmethod def backward(ctx, 
a=None): return None, None, None labelstat_idx", "None, None knnquery_naive = KNNQueryNaive.apply class KNNQuery(Function): @staticmethod def forward(ctx,", "c, m, n, features, idx, weight, output) return output @staticmethod", "if self.radius is not None: idx = ballquery(self.radius, 2*self.nsample, xyz,", "m, nsample) :return: new_label_stat: (b, m, nclass) ''' assert label_stat.is_contiguous()", "output = torch.cuda.FloatTensor(b, c, m, nsample) pointops_cuda.grouping_forward_cuda(b, c, n, m," ]
[ "def get(self, unique_id, key, factory, suffix=''): cache = self.cache if", "zope.cachedescriptors.property import Lazy as cachedproperty from zeit.cms.content.sources import FEATURE_TOGGLES from", "= int(self.connector.mtime(unique_id, suffix)) except (ValueError, TypeError): mtime = None if", "= obj['data'] if key not in cache: cache[key] = factory()", "zope.component import getUtility from zeit.connector.interfaces import IConnector from zeit.connector.filesystem import", "import environ from time import time from zope.cachedescriptors.property import Lazy", "factory() obj = cache[unique_id] obj['used'] += 1 obj['last'] = time()", "except (ValueError, TypeError): mtime = None if mtime is None:", "mtime cache = obj['data'] if key not in cache: cache[key]", "= self.misses = 0 log.info('initialized content cache (size %s)', size)", "size) return self.cache else: return None def get(self, unique_id, key,", "from time import time from zope.cachedescriptors.property import Lazy as cachedproperty", "not None and type(connector) is Connector: self.size = int(size) self.check", "self.connector = connector self.cache = defaultdict(lambda: dict(used=0, mtimes={}, data={})) self.hits", "self.hits += 1 return cache[key] def cleanup(self): cache = self.cache", "from collections import defaultdict from logging import getLogger from operator", "obj['data'] if key not in cache: cache[key] = factory() self.misses", "log.debug('removing %d items', over) last = sorted((cache[uid]['last'], uid) for uid", "uid in cache) for _, (_, uid) in zip(range(over), last):", "= environ.get('CONTENT_CACHE_SIZE') check = environ.get('CONTENT_CACHE_CHECK') connector = getUtility(IConnector) if size", "uid) in zip(range(over), last): del cache[uid] @property def usage(self): cache", "len(cache) - self.size log.info('size: %d/%d, hits: %d, misses: %d', over", "zip(range(over), last): del cache[uid] @property def usage(self): cache = self.cache", "cache = self.cache usage = {info['uid']: 
info['used'] for info in", "cache = self.cache stats = (dict(uid=uid, used=cache[uid]['used']) for uid in", "dict( size=self.size, count=len(cache), hits=self.hits, misses=self.misses, usage=usage) __cache = ContentCache() get", "time import time from zope.cachedescriptors.property import Lazy as cachedproperty from", "= sorted((cache[uid]['last'], uid) for uid in cache) for _, (_,", "None if mtime is None: return factory() obj = cache[unique_id]", "cache[uid] @property def usage(self): cache = self.cache stats = (dict(uid=uid,", "check is not None else self.size / 5 self.connector =", "over > 0: log.debug('removing %d items', over) last = sorted((cache[uid]['last'],", "else: return None def get(self, unique_id, key, factory, suffix=''): cache", "self.check == 0: self.cleanup() else: self.hits += 1 return cache[key]", "os import environ from time import time from zope.cachedescriptors.property import", "import defaultdict from logging import getLogger from operator import itemgetter", "mtime) if self.misses % self.check == 0: self.cleanup() else: self.hits", "(dict(uid=uid, used=cache[uid]['used']) for uid in cache) return sorted(stats, key=itemgetter('used')) def", "int(self.connector.mtime(unique_id, suffix)) except (ValueError, TypeError): mtime = None if mtime", "as cachedproperty from zeit.cms.content.sources import FEATURE_TOGGLES from zope.component import getUtility", "+= 1 log.debug('added %s (%s)', key, mtime) if self.misses %", "in cache) for _, (_, uid) in zip(range(over), last): del", "misses=self.misses, usage=usage) __cache = ContentCache() get = __cache.get info =", "import Connector log = getLogger(__name__) class ContentCache(object): @cachedproperty def cache(self):", "import getUtility from zeit.connector.interfaces import IConnector from zeit.connector.filesystem import Connector", "obj['mtimes'].get(suffix): obj['data'].clear() obj['mtimes'][suffix] = mtime cache = obj['data'] if key", "+ self.size, self.size, self.hits, self.misses) if over > 
0: log.debug('removing", "def cleanup(self): cache = self.cache over = len(cache) - self.size", "usage(self): cache = self.cache stats = (dict(uid=uid, used=cache[uid]['used']) for uid", "= {info['uid']: info['used'] for info in reversed(self.usage)} return dict( size=self.size,", "from zeit.connector.filesystem import Connector log = getLogger(__name__) class ContentCache(object): @cachedproperty", "self.size = int(size) self.check = int(check) if check is not", "1 log.debug('added %s (%s)', key, mtime) if self.misses % self.check", "in reversed(self.usage)} return dict( size=self.size, count=len(cache), hits=self.hits, misses=self.misses, usage=usage) __cache", "zeit.connector.interfaces import IConnector from zeit.connector.filesystem import Connector log = getLogger(__name__)", "5 self.connector = connector self.cache = defaultdict(lambda: dict(used=0, mtimes={}, data={}))", "obj = cache[unique_id] obj['used'] += 1 obj['last'] = time() if", "= time() if mtime != obj['mtimes'].get(suffix): obj['data'].clear() obj['mtimes'][suffix] = mtime", "is not None else self.size / 5 self.connector = connector", "if mtime != obj['mtimes'].get(suffix): obj['data'].clear() obj['mtimes'][suffix] = mtime cache =", "= getLogger(__name__) class ContentCache(object): @cachedproperty def cache(self): size = environ.get('CONTENT_CACHE_SIZE')", "IConnector from zeit.connector.filesystem import Connector log = getLogger(__name__) class ContentCache(object):", "Lazy as cachedproperty from zeit.cms.content.sources import FEATURE_TOGGLES from zope.component import", "= cache[unique_id] obj['used'] += 1 obj['last'] = time() if mtime", "mtime is None: return factory() obj = cache[unique_id] obj['used'] +=", "obj['last'] = time() if mtime != obj['mtimes'].get(suffix): obj['data'].clear() obj['mtimes'][suffix] =", "from zope.cachedescriptors.property import Lazy as cachedproperty from zeit.cms.content.sources import FEATURE_TOGGLES", "cache(self): size = environ.get('CONTENT_CACHE_SIZE') 
check = environ.get('CONTENT_CACHE_CHECK') connector = getUtility(IConnector)", "self.size log.info('size: %d/%d, hits: %d, misses: %d', over + self.size,", "from zeit.cms.content.sources import FEATURE_TOGGLES from zope.component import getUtility from zeit.connector.interfaces", "= None if mtime is None: return factory() obj =", "key=itemgetter('used')) def info(self): cache = self.cache usage = {info['uid']: info['used']", "= 0 log.info('initialized content cache (size %s)', size) return self.cache", "from zeit.connector.interfaces import IConnector from zeit.connector.filesystem import Connector log =", "info(self): cache = self.cache usage = {info['uid']: info['used'] for info", "{info['uid']: info['used'] for info in reversed(self.usage)} return dict( size=self.size, count=len(cache),", "operator import itemgetter from os import environ from time import", "connector = getUtility(IConnector) if size is not None and type(connector)", "time() if mtime != obj['mtimes'].get(suffix): obj['data'].clear() obj['mtimes'][suffix] = mtime cache", "size=self.size, count=len(cache), hits=self.hits, misses=self.misses, usage=usage) __cache = ContentCache() get =", "= self.cache if cache is None or not FEATURE_TOGGLES.find('content_caching'): return", "self.cache usage = {info['uid']: info['used'] for info in reversed(self.usage)} return", "@cachedproperty def cache(self): size = environ.get('CONTENT_CACHE_SIZE') check = environ.get('CONTENT_CACHE_CHECK') connector", "%d, misses: %d', over + self.size, self.size, self.hits, self.misses) if", "(ValueError, TypeError): mtime = None if mtime is None: return", "hits=self.hits, misses=self.misses, usage=usage) __cache = ContentCache() get = __cache.get info", "def cache(self): size = environ.get('CONTENT_CACHE_SIZE') check = environ.get('CONTENT_CACHE_CHECK') connector =", "environ from time import time from zope.cachedescriptors.property import Lazy as", "int(size) self.check = int(check) if check is not None else", "getLogger 
from operator import itemgetter from os import environ from", "else: self.hits += 1 return cache[key] def cleanup(self): cache =", "self.cache over = len(cache) - self.size log.info('size: %d/%d, hits: %d,", "self.cache else: return None def get(self, unique_id, key, factory, suffix=''):", "self.size / 5 self.connector = connector self.cache = defaultdict(lambda: dict(used=0,", "self.cache = defaultdict(lambda: dict(used=0, mtimes={}, data={})) self.hits = self.misses =", "return cache[key] def cleanup(self): cache = self.cache over = len(cache)", "= environ.get('CONTENT_CACHE_CHECK') connector = getUtility(IConnector) if size is not None", "obj['used'] += 1 obj['last'] = time() if mtime != obj['mtimes'].get(suffix):", "log = getLogger(__name__) class ContentCache(object): @cachedproperty def cache(self): size =", "== 0: self.cleanup() else: self.hits += 1 return cache[key] def", "info['used'] for info in reversed(self.usage)} return dict( size=self.size, count=len(cache), hits=self.hits,", "class ContentCache(object): @cachedproperty def cache(self): size = environ.get('CONTENT_CACHE_SIZE') check =", "from operator import itemgetter from os import environ from time", "mtimes={}, data={})) self.hits = self.misses = 0 log.info('initialized content cache", "in cache: cache[key] = factory() self.misses += 1 log.debug('added %s", "key, mtime) if self.misses % self.check == 0: self.cleanup() else:", "= len(cache) - self.size log.info('size: %d/%d, hits: %d, misses: %d',", "time from zope.cachedescriptors.property import Lazy as cachedproperty from zeit.cms.content.sources import", "0: self.cleanup() else: self.hits += 1 return cache[key] def cleanup(self):", "sorted(stats, key=itemgetter('used')) def info(self): cache = self.cache usage = {info['uid']:", "get(self, unique_id, key, factory, suffix=''): cache = self.cache if cache", "uid) for uid in cache) for _, (_, uid) in", "in zip(range(over), last): del cache[uid] @property def usage(self): cache =", "self.size, 
self.size, self.hits, self.misses) if over > 0: log.debug('removing %d", "for uid in cache) for _, (_, uid) in zip(range(over),", "for _, (_, uid) in zip(range(over), last): del cache[uid] @property", "def info(self): cache = self.cache usage = {info['uid']: info['used'] for", "cache is None or not FEATURE_TOGGLES.find('content_caching'): return factory() try: mtime", "1 return cache[key] def cleanup(self): cache = self.cache over =", "= getUtility(IConnector) if size is not None and type(connector) is", "environ.get('CONTENT_CACHE_SIZE') check = environ.get('CONTENT_CACHE_CHECK') connector = getUtility(IConnector) if size is", "0 log.info('initialized content cache (size %s)', size) return self.cache else:", "type(connector) is Connector: self.size = int(size) self.check = int(check) if", "content cache (size %s)', size) return self.cache else: return None", "obj['data'].clear() obj['mtimes'][suffix] = mtime cache = obj['data'] if key not", "key, factory, suffix=''): cache = self.cache if cache is None", "self.hits = self.misses = 0 log.info('initialized content cache (size %s)',", "0: log.debug('removing %d items', over) last = sorted((cache[uid]['last'], uid) for", "None else self.size / 5 self.connector = connector self.cache =", "itemgetter from os import environ from time import time from", "and type(connector) is Connector: self.size = int(size) self.check = int(check)", "is None or not FEATURE_TOGGLES.find('content_caching'): return factory() try: mtime =", "if mtime is None: return factory() obj = cache[unique_id] obj['used']", "return None def get(self, unique_id, key, factory, suffix=''): cache =", "is None: return factory() obj = cache[unique_id] obj['used'] += 1", "= self.cache over = len(cache) - self.size log.info('size: %d/%d, hits:", "data={})) self.hits = self.misses = 0 log.info('initialized content cache (size", "ContentCache(object): @cachedproperty def cache(self): size = environ.get('CONTENT_CACHE_SIZE') check = 
environ.get('CONTENT_CACHE_CHECK')", "Connector: self.size = int(size) self.check = int(check) if check is", "= int(size) self.check = int(check) if check is not None", "return self.cache else: return None def get(self, unique_id, key, factory,", "factory, suffix=''): cache = self.cache if cache is None or", "factory() try: mtime = int(self.connector.mtime(unique_id, suffix)) except (ValueError, TypeError): mtime", "mtime = int(self.connector.mtime(unique_id, suffix)) except (ValueError, TypeError): mtime = None", "self.cleanup() else: self.hits += 1 return cache[key] def cleanup(self): cache", "self.size, self.hits, self.misses) if over > 0: log.debug('removing %d items',", "sorted((cache[uid]['last'], uid) for uid in cache) for _, (_, uid)", "from zope.component import getUtility from zeit.connector.interfaces import IConnector from zeit.connector.filesystem", "info in reversed(self.usage)} return dict( size=self.size, count=len(cache), hits=self.hits, misses=self.misses, usage=usage)", "suffix)) except (ValueError, TypeError): mtime = None if mtime is", "if size is not None and type(connector) is Connector: self.size", "over + self.size, self.size, self.hits, self.misses) if over > 0:", "%d/%d, hits: %d, misses: %d', over + self.size, self.size, self.hits,", "return factory() obj = cache[unique_id] obj['used'] += 1 obj['last'] =", "import time from zope.cachedescriptors.property import Lazy as cachedproperty from zeit.cms.content.sources", "= defaultdict(lambda: dict(used=0, mtimes={}, data={})) self.hits = self.misses = 0", "not in cache: cache[key] = factory() self.misses += 1 log.debug('added", "% self.check == 0: self.cleanup() else: self.hits += 1 return", "%s (%s)', key, mtime) if self.misses % self.check == 0:", "last = sorted((cache[uid]['last'], uid) for uid in cache) for _,", "from os import environ from time import time from zope.cachedescriptors.property", "count=len(cache), hits=self.hits, misses=self.misses, usage=usage) __cache = ContentCache() 
get = __cache.get", "import getLogger from operator import itemgetter from os import environ", "import FEATURE_TOGGLES from zope.component import getUtility from zeit.connector.interfaces import IConnector", "def usage(self): cache = self.cache stats = (dict(uid=uid, used=cache[uid]['used']) for", "= self.cache stats = (dict(uid=uid, used=cache[uid]['used']) for uid in cache)", "return sorted(stats, key=itemgetter('used')) def info(self): cache = self.cache usage =", "self.cache if cache is None or not FEATURE_TOGGLES.find('content_caching'): return factory()", "last): del cache[uid] @property def usage(self): cache = self.cache stats", "zeit.connector.filesystem import Connector log = getLogger(__name__) class ContentCache(object): @cachedproperty def", "used=cache[uid]['used']) for uid in cache) return sorted(stats, key=itemgetter('used')) def info(self):", "TypeError): mtime = None if mtime is None: return factory()", "misses: %d', over + self.size, self.size, self.hits, self.misses) if over", "log.debug('added %s (%s)', key, mtime) if self.misses % self.check ==", "if cache is None or not FEATURE_TOGGLES.find('content_caching'): return factory() try:", "if key not in cache: cache[key] = factory() self.misses +=", "usage=usage) __cache = ContentCache() get = __cache.get info = __cache.info", "not FEATURE_TOGGLES.find('content_caching'): return factory() try: mtime = int(self.connector.mtime(unique_id, suffix)) except", "dict(used=0, mtimes={}, data={})) self.hits = self.misses = 0 log.info('initialized content", "+= 1 obj['last'] = time() if mtime != obj['mtimes'].get(suffix): obj['data'].clear()", "= int(check) if check is not None else self.size /", "mtime = None if mtime is None: return factory() obj", "if over > 0: log.debug('removing %d items', over) last =", "check = environ.get('CONTENT_CACHE_CHECK') connector = getUtility(IConnector) if size is not", "self.hits, self.misses) if over > 0: log.debug('removing %d items', over)", "getUtility from 
zeit.connector.interfaces import IConnector from zeit.connector.filesystem import Connector log", "cache[key] def cleanup(self): cache = self.cache over = len(cache) -", "del cache[uid] @property def usage(self): cache = self.cache stats =", "getLogger(__name__) class ContentCache(object): @cachedproperty def cache(self): size = environ.get('CONTENT_CACHE_SIZE') check", "usage = {info['uid']: info['used'] for info in reversed(self.usage)} return dict(", "over) last = sorted((cache[uid]['last'], uid) for uid in cache) for", "suffix=''): cache = self.cache if cache is None or not", "cache) for _, (_, uid) in zip(range(over), last): del cache[uid]", "is not None and type(connector) is Connector: self.size = int(size)", "cache[unique_id] obj['used'] += 1 obj['last'] = time() if mtime !=", "collections import defaultdict from logging import getLogger from operator import", "/ 5 self.connector = connector self.cache = defaultdict(lambda: dict(used=0, mtimes={},", "uid in cache) return sorted(stats, key=itemgetter('used')) def info(self): cache =", "%d', over + self.size, self.size, self.hits, self.misses) if over >", "environ.get('CONTENT_CACHE_CHECK') connector = getUtility(IConnector) if size is not None and", "cachedproperty from zeit.cms.content.sources import FEATURE_TOGGLES from zope.component import getUtility from", "cleanup(self): cache = self.cache over = len(cache) - self.size log.info('size:", "self.check = int(check) if check is not None else self.size", "- self.size log.info('size: %d/%d, hits: %d, misses: %d', over +", "size is not None and type(connector) is Connector: self.size =", "hits: %d, misses: %d', over + self.size, self.size, self.hits, self.misses)", "cache: cache[key] = factory() self.misses += 1 log.debug('added %s (%s)',", "defaultdict(lambda: dict(used=0, mtimes={}, data={})) self.hits = self.misses = 0 log.info('initialized", "cache (size %s)', size) return self.cache else: return None def", "return factory() try: mtime = 
int(self.connector.mtime(unique_id, suffix)) except (ValueError, TypeError):", "FEATURE_TOGGLES.find('content_caching'): return factory() try: mtime = int(self.connector.mtime(unique_id, suffix)) except (ValueError,", "!= obj['mtimes'].get(suffix): obj['data'].clear() obj['mtimes'][suffix] = mtime cache = obj['data'] if", "= (dict(uid=uid, used=cache[uid]['used']) for uid in cache) return sorted(stats, key=itemgetter('used'))", "mtime != obj['mtimes'].get(suffix): obj['data'].clear() obj['mtimes'][suffix] = mtime cache = obj['data']", "return dict( size=self.size, count=len(cache), hits=self.hits, misses=self.misses, usage=usage) __cache = ContentCache()", "self.misses = 0 log.info('initialized content cache (size %s)', size) return", "> 0: log.debug('removing %d items', over) last = sorted((cache[uid]['last'], uid)", "@property def usage(self): cache = self.cache stats = (dict(uid=uid, used=cache[uid]['used'])", "cache = self.cache if cache is None or not FEATURE_TOGGLES.find('content_caching'):", "self.misses += 1 log.debug('added %s (%s)', key, mtime) if self.misses", "stats = (dict(uid=uid, used=cache[uid]['used']) for uid in cache) return sorted(stats,", "(_, uid) in zip(range(over), last): del cache[uid] @property def usage(self):", "int(check) if check is not None else self.size / 5", "_, (_, uid) in zip(range(over), last): del cache[uid] @property def", "import Lazy as cachedproperty from zeit.cms.content.sources import FEATURE_TOGGLES from zope.component", "from logging import getLogger from operator import itemgetter from os", "(%s)', key, mtime) if self.misses % self.check == 0: self.cleanup()", "items', over) last = sorted((cache[uid]['last'], uid) for uid in cache)", "logging import getLogger from operator import itemgetter from os import", "size = environ.get('CONTENT_CACHE_SIZE') check = environ.get('CONTENT_CACHE_CHECK') connector = getUtility(IConnector) if", "else self.size / 5 self.connector = connector self.cache = defaultdict(lambda:", "None: 
return factory() obj = cache[unique_id] obj['used'] += 1 obj['last']", "if self.misses % self.check == 0: self.cleanup() else: self.hits +=", "is Connector: self.size = int(size) self.check = int(check) if check", "self.cache stats = (dict(uid=uid, used=cache[uid]['used']) for uid in cache) return", "if check is not None else self.size / 5 self.connector", "cache = obj['data'] if key not in cache: cache[key] =", "key not in cache: cache[key] = factory() self.misses += 1", "FEATURE_TOGGLES from zope.component import getUtility from zeit.connector.interfaces import IConnector from", "cache[key] = factory() self.misses += 1 log.debug('added %s (%s)', key,", "or not FEATURE_TOGGLES.find('content_caching'): return factory() try: mtime = int(self.connector.mtime(unique_id, suffix))", "Connector log = getLogger(__name__) class ContentCache(object): @cachedproperty def cache(self): size", "= connector self.cache = defaultdict(lambda: dict(used=0, mtimes={}, data={})) self.hits =", "%s)', size) return self.cache else: return None def get(self, unique_id,", "defaultdict from logging import getLogger from operator import itemgetter from", "import itemgetter from os import environ from time import time", "obj['mtimes'][suffix] = mtime cache = obj['data'] if key not in", "log.info('initialized content cache (size %s)', size) return self.cache else: return", "zeit.cms.content.sources import FEATURE_TOGGLES from zope.component import getUtility from zeit.connector.interfaces import", "for info in reversed(self.usage)} return dict( size=self.size, count=len(cache), hits=self.hits, misses=self.misses,", "1 obj['last'] = time() if mtime != obj['mtimes'].get(suffix): obj['data'].clear() obj['mtimes'][suffix]", "= factory() self.misses += 1 log.debug('added %s (%s)', key, mtime)", "getUtility(IConnector) if size is not None and type(connector) is Connector:", "+= 1 return cache[key] def cleanup(self): cache = self.cache over", "in cache) return sorted(stats, key=itemgetter('used')) 
def info(self): cache = self.cache", "None def get(self, unique_id, key, factory, suffix=''): cache = self.cache", "reversed(self.usage)} return dict( size=self.size, count=len(cache), hits=self.hits, misses=self.misses, usage=usage) __cache =", "self.misses % self.check == 0: self.cleanup() else: self.hits += 1", "cache = self.cache over = len(cache) - self.size log.info('size: %d/%d,", "try: mtime = int(self.connector.mtime(unique_id, suffix)) except (ValueError, TypeError): mtime =", "None and type(connector) is Connector: self.size = int(size) self.check =", "over = len(cache) - self.size log.info('size: %d/%d, hits: %d, misses:", "factory() self.misses += 1 log.debug('added %s (%s)', key, mtime) if", "= mtime cache = obj['data'] if key not in cache:", "not None else self.size / 5 self.connector = connector self.cache", "self.misses) if over > 0: log.debug('removing %d items', over) last", "<reponame>rickdg/vivi from collections import defaultdict from logging import getLogger from", "connector self.cache = defaultdict(lambda: dict(used=0, mtimes={}, data={})) self.hits = self.misses", "(size %s)', size) return self.cache else: return None def get(self,", "cache) return sorted(stats, key=itemgetter('used')) def info(self): cache = self.cache usage", "for uid in cache) return sorted(stats, key=itemgetter('used')) def info(self): cache", "= self.cache usage = {info['uid']: info['used'] for info in reversed(self.usage)}", "None or not FEATURE_TOGGLES.find('content_caching'): return factory() try: mtime = int(self.connector.mtime(unique_id,", "unique_id, key, factory, suffix=''): cache = self.cache if cache is", "%d items', over) last = sorted((cache[uid]['last'], uid) for uid in", "log.info('size: %d/%d, hits: %d, misses: %d', over + self.size, self.size,", "import IConnector from zeit.connector.filesystem import Connector log = getLogger(__name__) class" ]
[ "\"\"\"Genesais project annotation.\"\"\" def __init__(self, data, gencloud): for field in", "in data)) def data(self, **query): \"\"\"Query for Data object annotation.\"\"\"", "for d in self.gencloud.api.dataid.get(**query)['objects']) return [d for d in data", "project annotation.\"\"\" def __init__(self, data, gencloud): for field in data:", "self.gencloud = gencloud self.id = getattr(self, 'id', None) # pylint:", "data = self.gencloud.project_data(self.id) return sorted(set(d.type for d in data)) def", "self.gencloud.project_data(self.id) return sorted(set(d.type for d in data)) def data(self, **query):", "unicode_literals class GenProject(object): \"\"\"Genesais project annotation.\"\"\" def __init__(self, data, gencloud):", "def find(self, filter_str): \"\"\"Filter Data object annotation.\"\"\" raise NotImplementedError() def", "gencloud): for field in data: setattr(self, field, data[field]) self.gencloud =", "data(self, **query): \"\"\"Query for Data object annotation.\"\"\" data = self.gencloud.project_data(self.id)", "annotation.\"\"\" data = self.gencloud.project_data(self.id) query['case_ids__contains'] = self.id ids = set(d['id']", "data types.\"\"\" data = self.gencloud.project_data(self.id) return sorted(set(d.type for d in", "list of data types.\"\"\" data = self.gencloud.project_data(self.id) return sorted(set(d.type for", "data = self.gencloud.project_data(self.id) query['case_ids__contains'] = self.id ids = set(d['id'] for", "d.id in ids] def find(self, filter_str): \"\"\"Filter Data object annotation.\"\"\"", "[d for d in data if d.id in ids] def", "for Data object annotation.\"\"\" data = self.gencloud.project_data(self.id) query['case_ids__contains'] = self.id", "set(d['id'] for d in self.gencloud.api.dataid.get(**query)['objects']) return [d for d in", "data)) def data(self, **query): \"\"\"Query for Data object annotation.\"\"\" data", "pylint: disable=invalid-name self.name = getattr(self, 'name', None) def data_types(self): \"\"\"Return", 
"in ids] def find(self, filter_str): \"\"\"Filter Data object annotation.\"\"\" raise", "class GenProject(object): \"\"\"Genesais project annotation.\"\"\" def __init__(self, data, gencloud): for", "filter_str): \"\"\"Filter Data object annotation.\"\"\" raise NotImplementedError() def __str__(self): return", "def __init__(self, data, gencloud): for field in data: setattr(self, field,", "field in data: setattr(self, field, data[field]) self.gencloud = gencloud self.id", "for d in data if d.id in ids] def find(self,", "data[field]) self.gencloud = gencloud self.id = getattr(self, 'id', None) #", "NotImplementedError() def __str__(self): return self.name or 'n/a' def __repr__(self): return", "self.gencloud.api.dataid.get(**query)['objects']) return [d for d in data if d.id in", "d in self.gencloud.api.dataid.get(**query)['objects']) return [d for d in data if", "None) def data_types(self): \"\"\"Return a list of data types.\"\"\" data", "for d in data)) def data(self, **query): \"\"\"Query for Data", "annotation.\"\"\" def __init__(self, data, gencloud): for field in data: setattr(self,", "self.gencloud.project_data(self.id) query['case_ids__contains'] = self.id ids = set(d['id'] for d in", "object annotation.\"\"\" raise NotImplementedError() def __str__(self): return self.name or 'n/a'", "return [d for d in data if d.id in ids]", "getattr(self, 'name', None) def data_types(self): \"\"\"Return a list of data", "in self.gencloud.api.dataid.get(**query)['objects']) return [d for d in data if d.id", "getattr(self, 'id', None) # pylint: disable=invalid-name self.name = getattr(self, 'name',", "annotation.\"\"\" raise NotImplementedError() def __str__(self): return self.name or 'n/a' def", "ids] def find(self, filter_str): \"\"\"Filter Data object annotation.\"\"\" raise NotImplementedError()", "= self.gencloud.project_data(self.id) return sorted(set(d.type for d in data)) def data(self,", "print_function, unicode_literals class GenProject(object): \"\"\"Genesais project 
annotation.\"\"\" def __init__(self, data,", "ids = set(d['id'] for d in self.gencloud.api.dataid.get(**query)['objects']) return [d for", "field, data[field]) self.gencloud = gencloud self.id = getattr(self, 'id', None)", "object annotation.\"\"\" data = self.gencloud.project_data(self.id) query['case_ids__contains'] = self.id ids =", "Data object annotation.\"\"\" data = self.gencloud.project_data(self.id) query['case_ids__contains'] = self.id ids", "of data types.\"\"\" data = self.gencloud.project_data(self.id) return sorted(set(d.type for d", "for field in data: setattr(self, field, data[field]) self.gencloud = gencloud", "data_types(self): \"\"\"Return a list of data types.\"\"\" data = self.gencloud.project_data(self.id)", "from __future__ import absolute_import, division, print_function, unicode_literals class GenProject(object): \"\"\"Genesais", "self.name = getattr(self, 'name', None) def data_types(self): \"\"\"Return a list", "= gencloud self.id = getattr(self, 'id', None) # pylint: disable=invalid-name", "= self.gencloud.project_data(self.id) query['case_ids__contains'] = self.id ids = set(d['id'] for d", "'name', None) def data_types(self): \"\"\"Return a list of data types.\"\"\"", "sorted(set(d.type for d in data)) def data(self, **query): \"\"\"Query for", "in data if d.id in ids] def find(self, filter_str): \"\"\"Filter", "GenProject(object): \"\"\"Genesais project annotation.\"\"\" def __init__(self, data, gencloud): for field", "<reponame>genialis/genesis-genapi \"\"\"Project\"\"\" from __future__ import absolute_import, division, print_function, unicode_literals class", "__init__(self, data, gencloud): for field in data: setattr(self, field, data[field])", "None) # pylint: disable=invalid-name self.name = getattr(self, 'name', None) def", "self.id ids = set(d['id'] for d in self.gencloud.api.dataid.get(**query)['objects']) return [d", "\"\"\"Return a list of data types.\"\"\" data = self.gencloud.project_data(self.id) return", "raise 
NotImplementedError() def __str__(self): return self.name or 'n/a' def __repr__(self):", "= set(d['id'] for d in self.gencloud.api.dataid.get(**query)['objects']) return [d for d", "types.\"\"\" data = self.gencloud.project_data(self.id) return sorted(set(d.type for d in data))", "\"\"\"Query for Data object annotation.\"\"\" data = self.gencloud.project_data(self.id) query['case_ids__contains'] =", "\"\"\"Project\"\"\" from __future__ import absolute_import, division, print_function, unicode_literals class GenProject(object):", "= getattr(self, 'id', None) # pylint: disable=invalid-name self.name = getattr(self,", "def data(self, **query): \"\"\"Query for Data object annotation.\"\"\" data =", "\"\"\"Filter Data object annotation.\"\"\" raise NotImplementedError() def __str__(self): return self.name", "division, print_function, unicode_literals class GenProject(object): \"\"\"Genesais project annotation.\"\"\" def __init__(self,", "self.name or 'n/a' def __repr__(self): return u\"GenProject: {} - {}\".format(self.id,", "import absolute_import, division, print_function, unicode_literals class GenProject(object): \"\"\"Genesais project annotation.\"\"\"", "= self.id ids = set(d['id'] for d in self.gencloud.api.dataid.get(**query)['objects']) return", "d in data if d.id in ids] def find(self, filter_str):", "in data: setattr(self, field, data[field]) self.gencloud = gencloud self.id =", "return sorted(set(d.type for d in data)) def data(self, **query): \"\"\"Query", "if d.id in ids] def find(self, filter_str): \"\"\"Filter Data object", "return self.name or 'n/a' def __repr__(self): return u\"GenProject: {} -", "find(self, filter_str): \"\"\"Filter Data object annotation.\"\"\" raise NotImplementedError() def __str__(self):", "**query): \"\"\"Query for Data object annotation.\"\"\" data = self.gencloud.project_data(self.id) query['case_ids__contains']", "def data_types(self): \"\"\"Return a list of data types.\"\"\" data =", "disable=invalid-name self.name = 
getattr(self, 'name', None) def data_types(self): \"\"\"Return a", "d in data)) def data(self, **query): \"\"\"Query for Data object", "def __str__(self): return self.name or 'n/a' def __repr__(self): return u\"GenProject:", "a list of data types.\"\"\" data = self.gencloud.project_data(self.id) return sorted(set(d.type", "data if d.id in ids] def find(self, filter_str): \"\"\"Filter Data", "= getattr(self, 'name', None) def data_types(self): \"\"\"Return a list of", "absolute_import, division, print_function, unicode_literals class GenProject(object): \"\"\"Genesais project annotation.\"\"\" def", "data, gencloud): for field in data: setattr(self, field, data[field]) self.gencloud", "data: setattr(self, field, data[field]) self.gencloud = gencloud self.id = getattr(self,", "Data object annotation.\"\"\" raise NotImplementedError() def __str__(self): return self.name or", "self.id = getattr(self, 'id', None) # pylint: disable=invalid-name self.name =", "gencloud self.id = getattr(self, 'id', None) # pylint: disable=invalid-name self.name", "'id', None) # pylint: disable=invalid-name self.name = getattr(self, 'name', None)", "__str__(self): return self.name or 'n/a' def __repr__(self): return u\"GenProject: {}", "# pylint: disable=invalid-name self.name = getattr(self, 'name', None) def data_types(self):", "or 'n/a' def __repr__(self): return u\"GenProject: {} - {}\".format(self.id, self.name)", "query['case_ids__contains'] = self.id ids = set(d['id'] for d in self.gencloud.api.dataid.get(**query)['objects'])", "__future__ import absolute_import, division, print_function, unicode_literals class GenProject(object): \"\"\"Genesais project", "setattr(self, field, data[field]) self.gencloud = gencloud self.id = getattr(self, 'id'," ]
[ "django.db.models import QuerySet from django.utils.decorators import method_decorator from drf_yasg.utils import", "from account.serializers import CustomerInfoSerializer, SignUpFormSerializer @method_decorator(name='retrieve', decorator=swagger_auto_schema( operation_description=\"회원 개인정보 조회", "수정 API\", )) @method_decorator(name='destroy', decorator=swagger_auto_schema( operation_description=\"회원 탈퇴 API\", )) class", "QuerySet = Customer.objects permission_classes = (IsAuthenticated,) http_method_names = ['get', 'post',", "'delete'] def get_serializer_class(self): if self.request.method == 'POST': return SignUpFormSerializer elif", "import IsAuthenticated, AllowAny from rest_framework.response import Response from rest_framework.viewsets import", "@method_decorator(name='create', decorator=swagger_auto_schema( operation_description=\"회원 가입 API\", )) @method_decorator(name='update', decorator=swagger_auto_schema( operation_description=\"회원 정보", "in permission_classes] def create(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data)", "import swagger_auto_schema from rest_framework import viewsets, status from rest_framework.permissions import", ")) class CustomerAPIViewSet(mixins.CreateModelMixin, mixins.DestroyModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): queryset: QuerySet =", "CustomerAPIViewSet(mixins.CreateModelMixin, mixins.DestroyModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): queryset: QuerySet = Customer.objects permission_classes", "self.request.method == 'POST': return SignUpFormSerializer elif self.request.method == 'GET': return", "== 'DELETE': return SignUpFormSerializer def get_permissions(self): if self.request.method == 'POST':", "views here. 
from django.db.models import QuerySet from django.utils.decorators import method_decorator", "viewsets.GenericViewSet): queryset: QuerySet = Customer.objects permission_classes = (IsAuthenticated,) http_method_names =", ")) @method_decorator(name='create', decorator=swagger_auto_schema( operation_description=\"회원 가입 API\", )) @method_decorator(name='update', decorator=swagger_auto_schema( operation_description=\"회원", "= (IsAuthenticated,) http_method_names = ['get', 'post', 'put', 'delete'] def get_serializer_class(self):", "'put', 'delete'] def get_serializer_class(self): if self.request.method == 'POST': return SignUpFormSerializer", "'POST': permission_classes = [AllowAny] return [permission() for permission in permission_classes]", "import mixins from account.documents import DjangoFilterDescriptionInspector from account.models import Customer", "'GET': return CustomerInfoSerializer elif self.request.method == 'PUT': return SignUpFormSerializer elif", "= Customer.objects permission_classes = (IsAuthenticated,) http_method_names = ['get', 'post', 'put',", "class CustomerAPIViewSet(mixins.CreateModelMixin, mixins.DestroyModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): queryset: QuerySet = Customer.objects", "IsAuthenticated, AllowAny from rest_framework.response import Response from rest_framework.viewsets import mixins", "if self.request.method == 'POST': return SignUpFormSerializer elif self.request.method == 'GET':", "mixins.DestroyModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): queryset: QuerySet = Customer.objects permission_classes =", "import Customer from account.serializers import CustomerInfoSerializer, SignUpFormSerializer @method_decorator(name='retrieve', decorator=swagger_auto_schema( operation_description=\"회원", "decorator=swagger_auto_schema( operation_description=\"회원 탈퇴 API\", )) class CustomerAPIViewSet(mixins.CreateModelMixin, mixins.DestroyModelMixin, 
mixins.RetrieveModelMixin, mixins.UpdateModelMixin,", "QuerySet from django.utils.decorators import method_decorator from drf_yasg.utils import swagger_auto_schema from", "DjangoFilterDescriptionInspector from account.models import Customer from account.serializers import CustomerInfoSerializer, SignUpFormSerializer", "정보 수정 API\", )) @method_decorator(name='destroy', decorator=swagger_auto_schema( operation_description=\"회원 탈퇴 API\", ))", ")) @method_decorator(name='destroy', decorator=swagger_auto_schema( operation_description=\"회원 탈퇴 API\", )) class CustomerAPIViewSet(mixins.CreateModelMixin, mixins.DestroyModelMixin,", "operation_description=\"회원 탈퇴 API\", )) class CustomerAPIViewSet(mixins.CreateModelMixin, mixins.DestroyModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet):", "filter_inspectors=[DjangoFilterDescriptionInspector], )) @method_decorator(name='create', decorator=swagger_auto_schema( operation_description=\"회원 가입 API\", )) @method_decorator(name='update', decorator=swagger_auto_schema(", "rest_framework.permissions import IsAuthenticated, AllowAny from rest_framework.response import Response from rest_framework.viewsets", "request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) self.perform_create(serializer) headers =", "= ['get', 'post', 'put', 'delete'] def get_serializer_class(self): if self.request.method ==", "your views here. 
from django.db.models import QuerySet from django.utils.decorators import", "== 'GET': return CustomerInfoSerializer elif self.request.method == 'PUT': return SignUpFormSerializer", "self.request.method == 'DELETE': return SignUpFormSerializer def get_permissions(self): if self.request.method ==", "for permission in permission_classes] def create(self, request, *args, **kwargs): serializer", ")) @method_decorator(name='update', decorator=swagger_auto_schema( operation_description=\"회원 정보 수정 API\", )) @method_decorator(name='destroy', decorator=swagger_auto_schema(", "return SignUpFormSerializer elif self.request.method == 'GET': return CustomerInfoSerializer elif self.request.method", "from django.utils.decorators import method_decorator from drf_yasg.utils import swagger_auto_schema from rest_framework", "elif self.request.method == 'GET': return CustomerInfoSerializer elif self.request.method == 'PUT':", "Response from rest_framework.viewsets import mixins from account.documents import DjangoFilterDescriptionInspector from", "@method_decorator(name='retrieve', decorator=swagger_auto_schema( operation_description=\"회원 개인정보 조회 API\", filter_inspectors=[DjangoFilterDescriptionInspector], )) @method_decorator(name='create', decorator=swagger_auto_schema(", "self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) self.perform_create(serializer) headers = self.get_success_headers(serializer.data) return Response({'id': serializer.data['id']}, status=status.HTTP_201_CREATED,", "drf_yasg.utils import swagger_auto_schema from rest_framework import viewsets, status from rest_framework.permissions", "def create(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) self.perform_create(serializer)", "mixins from account.documents import DjangoFilterDescriptionInspector from account.models import Customer from", "elif self.request.method == 'PUT': return SignUpFormSerializer elif 
self.request.method == 'DELETE':", "mixins.UpdateModelMixin, viewsets.GenericViewSet): queryset: QuerySet = Customer.objects permission_classes = (IsAuthenticated,) http_method_names", "return SignUpFormSerializer elif self.request.method == 'DELETE': return SignUpFormSerializer def get_permissions(self):", "serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) self.perform_create(serializer) headers = self.get_success_headers(serializer.data) return Response({'id':", "[AllowAny] return [permission() for permission in permission_classes] def create(self, request,", "SignUpFormSerializer elif self.request.method == 'GET': return CustomerInfoSerializer elif self.request.method ==", "elif self.request.method == 'DELETE': return SignUpFormSerializer def get_permissions(self): if self.request.method", "'DELETE': return SignUpFormSerializer def get_permissions(self): if self.request.method == 'POST': permission_classes", "decorator=swagger_auto_schema( operation_description=\"회원 가입 API\", )) @method_decorator(name='update', decorator=swagger_auto_schema( operation_description=\"회원 정보 수정", "from account.models import Customer from account.serializers import CustomerInfoSerializer, SignUpFormSerializer @method_decorator(name='retrieve',", "if self.request.method == 'POST': permission_classes = [AllowAny] return [permission() for", "decorator=swagger_auto_schema( operation_description=\"회원 정보 수정 API\", )) @method_decorator(name='destroy', decorator=swagger_auto_schema( operation_description=\"회원 탈퇴", "import CustomerInfoSerializer, SignUpFormSerializer @method_decorator(name='retrieve', decorator=swagger_auto_schema( operation_description=\"회원 개인정보 조회 API\", filter_inspectors=[DjangoFilterDescriptionInspector],", "def get_permissions(self): if self.request.method == 'POST': permission_classes = [AllowAny] return", "= self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) self.perform_create(serializer) 
headers = self.get_success_headers(serializer.data) return Response({'id': serializer.data['id']},", "permission_classes] def create(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True)", "[permission() for permission in permission_classes] def create(self, request, *args, **kwargs):", "permission_classes = (IsAuthenticated,) http_method_names = ['get', 'post', 'put', 'delete'] def", "serializer.is_valid(raise_exception=True) self.perform_create(serializer) headers = self.get_success_headers(serializer.data) return Response({'id': serializer.data['id']}, status=status.HTTP_201_CREATED, headers=headers)", "CustomerInfoSerializer, SignUpFormSerializer @method_decorator(name='retrieve', decorator=swagger_auto_schema( operation_description=\"회원 개인정보 조회 API\", filter_inspectors=[DjangoFilterDescriptionInspector], ))", "from rest_framework.response import Response from rest_framework.viewsets import mixins from account.documents", "*args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) self.perform_create(serializer) headers = self.get_success_headers(serializer.data)", "API\", )) class CustomerAPIViewSet(mixins.CreateModelMixin, mixins.DestroyModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): queryset: QuerySet", "get_serializer_class(self): if self.request.method == 'POST': return SignUpFormSerializer elif self.request.method ==", "return SignUpFormSerializer def get_permissions(self): if self.request.method == 'POST': permission_classes =", "가입 API\", )) @method_decorator(name='update', decorator=swagger_auto_schema( operation_description=\"회원 정보 수정 API\", ))", "def get_serializer_class(self): if self.request.method == 'POST': return SignUpFormSerializer elif self.request.method", "CustomerInfoSerializer elif self.request.method == 'PUT': return SignUpFormSerializer elif self.request.method ==", 
"import QuerySet from django.utils.decorators import method_decorator from drf_yasg.utils import swagger_auto_schema", "API\", )) @method_decorator(name='destroy', decorator=swagger_auto_schema( operation_description=\"회원 탈퇴 API\", )) class CustomerAPIViewSet(mixins.CreateModelMixin,", "swagger_auto_schema from rest_framework import viewsets, status from rest_framework.permissions import IsAuthenticated,", "viewsets, status from rest_framework.permissions import IsAuthenticated, AllowAny from rest_framework.response import", "import method_decorator from drf_yasg.utils import swagger_auto_schema from rest_framework import viewsets,", "== 'POST': permission_classes = [AllowAny] return [permission() for permission in", "return CustomerInfoSerializer elif self.request.method == 'PUT': return SignUpFormSerializer elif self.request.method", "queryset: QuerySet = Customer.objects permission_classes = (IsAuthenticated,) http_method_names = ['get',", "account.serializers import CustomerInfoSerializer, SignUpFormSerializer @method_decorator(name='retrieve', decorator=swagger_auto_schema( operation_description=\"회원 개인정보 조회 API\",", "self.request.method == 'POST': permission_classes = [AllowAny] return [permission() for permission", "django.utils.decorators import method_decorator from drf_yasg.utils import swagger_auto_schema from rest_framework import", "from account.documents import DjangoFilterDescriptionInspector from account.models import Customer from account.serializers", "Customer from account.serializers import CustomerInfoSerializer, SignUpFormSerializer @method_decorator(name='retrieve', decorator=swagger_auto_schema( operation_description=\"회원 개인정보", "account.documents import DjangoFilterDescriptionInspector from account.models import Customer from account.serializers import", "mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): queryset: QuerySet = Customer.objects permission_classes = (IsAuthenticated,)", 
"<reponame>KimSoungRyoul/drf_unitteset_study_project<gh_stars>0 # Create your views here. from django.db.models import QuerySet", "return [permission() for permission in permission_classes] def create(self, request, *args,", "here. from django.db.models import QuerySet from django.utils.decorators import method_decorator from", "decorator=swagger_auto_schema( operation_description=\"회원 개인정보 조회 API\", filter_inspectors=[DjangoFilterDescriptionInspector], )) @method_decorator(name='create', decorator=swagger_auto_schema( operation_description=\"회원", "rest_framework import viewsets, status from rest_framework.permissions import IsAuthenticated, AllowAny from", "rest_framework.response import Response from rest_framework.viewsets import mixins from account.documents import", "from rest_framework.permissions import IsAuthenticated, AllowAny from rest_framework.response import Response from", "조회 API\", filter_inspectors=[DjangoFilterDescriptionInspector], )) @method_decorator(name='create', decorator=swagger_auto_schema( operation_description=\"회원 가입 API\", ))", "import viewsets, status from rest_framework.permissions import IsAuthenticated, AllowAny from rest_framework.response", "SignUpFormSerializer elif self.request.method == 'DELETE': return SignUpFormSerializer def get_permissions(self): if", "# Create your views here. 
from django.db.models import QuerySet from", "permission in permission_classes] def create(self, request, *args, **kwargs): serializer =", "탈퇴 API\", )) class CustomerAPIViewSet(mixins.CreateModelMixin, mixins.DestroyModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): queryset:", "Customer.objects permission_classes = (IsAuthenticated,) http_method_names = ['get', 'post', 'put', 'delete']", "@method_decorator(name='update', decorator=swagger_auto_schema( operation_description=\"회원 정보 수정 API\", )) @method_decorator(name='destroy', decorator=swagger_auto_schema( operation_description=\"회원", "'POST': return SignUpFormSerializer elif self.request.method == 'GET': return CustomerInfoSerializer elif", "import DjangoFilterDescriptionInspector from account.models import Customer from account.serializers import CustomerInfoSerializer,", "account.models import Customer from account.serializers import CustomerInfoSerializer, SignUpFormSerializer @method_decorator(name='retrieve', decorator=swagger_auto_schema(", "operation_description=\"회원 개인정보 조회 API\", filter_inspectors=[DjangoFilterDescriptionInspector], )) @method_decorator(name='create', decorator=swagger_auto_schema( operation_description=\"회원 가입", "'PUT': return SignUpFormSerializer elif self.request.method == 'DELETE': return SignUpFormSerializer def", "from django.db.models import QuerySet from django.utils.decorators import method_decorator from drf_yasg.utils", "개인정보 조회 API\", filter_inspectors=[DjangoFilterDescriptionInspector], )) @method_decorator(name='create', decorator=swagger_auto_schema( operation_description=\"회원 가입 API\",", "rest_framework.viewsets import mixins from account.documents import DjangoFilterDescriptionInspector from account.models import", "self.request.method == 'PUT': return SignUpFormSerializer elif self.request.method == 'DELETE': return", "(IsAuthenticated,) http_method_names = ['get', 'post', 'put', 'delete'] def get_serializer_class(self): if", 
"API\", )) @method_decorator(name='update', decorator=swagger_auto_schema( operation_description=\"회원 정보 수정 API\", )) @method_decorator(name='destroy',", "from rest_framework import viewsets, status from rest_framework.permissions import IsAuthenticated, AllowAny", "operation_description=\"회원 정보 수정 API\", )) @method_decorator(name='destroy', decorator=swagger_auto_schema( operation_description=\"회원 탈퇴 API\",", "Create your views here. from django.db.models import QuerySet from django.utils.decorators", "= [AllowAny] return [permission() for permission in permission_classes] def create(self,", "get_permissions(self): if self.request.method == 'POST': permission_classes = [AllowAny] return [permission()", "operation_description=\"회원 가입 API\", )) @method_decorator(name='update', decorator=swagger_auto_schema( operation_description=\"회원 정보 수정 API\",", "self.request.method == 'GET': return CustomerInfoSerializer elif self.request.method == 'PUT': return", "SignUpFormSerializer @method_decorator(name='retrieve', decorator=swagger_auto_schema( operation_description=\"회원 개인정보 조회 API\", filter_inspectors=[DjangoFilterDescriptionInspector], )) @method_decorator(name='create',", "method_decorator from drf_yasg.utils import swagger_auto_schema from rest_framework import viewsets, status", "import Response from rest_framework.viewsets import mixins from account.documents import DjangoFilterDescriptionInspector", "SignUpFormSerializer def get_permissions(self): if self.request.method == 'POST': permission_classes = [AllowAny]", "status from rest_framework.permissions import IsAuthenticated, AllowAny from rest_framework.response import Response", "@method_decorator(name='destroy', decorator=swagger_auto_schema( operation_description=\"회원 탈퇴 API\", )) class CustomerAPIViewSet(mixins.CreateModelMixin, mixins.DestroyModelMixin, mixins.RetrieveModelMixin,", "API\", filter_inspectors=[DjangoFilterDescriptionInspector], )) @method_decorator(name='create', decorator=swagger_auto_schema( 
operation_description=\"회원 가입 API\", )) @method_decorator(name='update',", "from rest_framework.viewsets import mixins from account.documents import DjangoFilterDescriptionInspector from account.models", "http_method_names = ['get', 'post', 'put', 'delete'] def get_serializer_class(self): if self.request.method", "**kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) self.perform_create(serializer) headers = self.get_success_headers(serializer.data) return", "create(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) self.perform_create(serializer) headers", "permission_classes = [AllowAny] return [permission() for permission in permission_classes] def", "'post', 'put', 'delete'] def get_serializer_class(self): if self.request.method == 'POST': return", "AllowAny from rest_framework.response import Response from rest_framework.viewsets import mixins from", "['get', 'post', 'put', 'delete'] def get_serializer_class(self): if self.request.method == 'POST':", "== 'PUT': return SignUpFormSerializer elif self.request.method == 'DELETE': return SignUpFormSerializer", "from drf_yasg.utils import swagger_auto_schema from rest_framework import viewsets, status from", "== 'POST': return SignUpFormSerializer elif self.request.method == 'GET': return CustomerInfoSerializer" ]
[ "namespace): get_name_or_id_validator('load_balancing_settings', 'loadBalancingSettings')(cmd, namespace) def validate_probe_settings(cmd, namespace): get_name_or_id_validator('probe_settings', 'healthProbeSettings')(cmd, namespace)", "pylint: disable=no-self-use def parse_match_condition(self, values): from azext_front_door.vendored_sdks.models import MatchCondition if", "import is_valid_resource_id, resource_id subscription_id = get_subscription_id(cmd.cli_ctx) resource_group = namespace.resource_group_name names_or_ids", "def _validate_name_or_id(cmd, namespace): from azure.cli.core.commands.client_factory import get_subscription_id from msrestazure.tools import", "the end if not isinstance(names_or_ids, list): is_list = False names_or_ids", "return ids = [] for val in names_or_ids: id_params =", "for val in names_or_ids: id_params = { 'subscription': subscription_id, 'resource_group':", "namespace) def validate_backend_pool(cmd, namespace): get_name_or_id_validator('backend_pool', 'backendPools')(cmd, namespace) def validate_rules_engine(cmd, namespace):", "namespace): get_name_or_id_validator('frontend_endpoints', 'frontendEndpoints')(cmd, namespace) def validate_backend_pool(cmd, namespace): get_name_or_id_validator('backend_pool', 'backendPools')(cmd, namespace)", "[VALUE [VALUE ...]]') def __call__(self, parser, namespace, values, option_string=None): match_condition", "'healthProbeSettings')(cmd, namespace) def validate_frontend_endpoints(cmd, namespace): get_name_or_id_validator('frontend_endpoints', 'frontendEndpoints')(cmd, namespace) def validate_backend_pool(cmd,", "the MIT License. 
See License.txt in the project root for", "= getattr(namespace, dest) is_list = True # treat single values", "'child_type_1': child_type, 'child_name_1': val if child_type else None } if", "--match-condition VARIABLE OPERATOR [VALUE [VALUE ...]]') def __call__(self, parser, namespace,", "validate_probe_settings(cmd, namespace): get_name_or_id_validator('probe_settings', 'healthProbeSettings')(cmd, namespace) def validate_frontend_endpoints(cmd, namespace): get_name_or_id_validator('frontend_endpoints', 'frontendEndpoints')(cmd,", "id_params = { 'subscription': subscription_id, 'resource_group': resource_group, 'namespace': resource_namespace, 'type':", "validate_waf_policy(cmd, namespace): get_name_or_id_validator( dest='waf_policy', resource_type='WebApplicationFirewallPolicy' )(cmd, namespace) def validate_keyvault(cmd, namespace):", "treat single values as a list, but convert back in", "'namespace': resource_namespace, 'type': resource_type, 'name': getattr(namespace, resource_name_dest) if child_type else", "is_list = False names_or_ids = [names_or_ids] if names_or_ids == [None]", "child_type else None } if not is_valid_resource_id(val): val = resource_id(**id_params)", "import CLIError raise CLIError('usage error: --match-condition VARIABLE OPERATOR [VALUE [VALUE", "from msrestazure.tools import is_valid_resource_id, resource_id subscription_id = get_subscription_id(cmd.cli_ctx) resource_group =", "root for license information. 
# -------------------------------------------------------------------------------------------- import argparse def get_name_or_id_validator(dest,", "in names_or_ids: id_params = { 'subscription': subscription_id, 'resource_group': resource_group, 'namespace':", "# pylint: disable=protected-access class MatchConditionAction(argparse._AppendAction): # pylint: disable=no-self-use def parse_match_condition(self,", "isinstance(names_or_ids, list): is_list = False names_or_ids = [names_or_ids] if names_or_ids", "import argparse def get_name_or_id_validator(dest, child_type=None, resource_type='Frontdoors', resource_namespace='Microsoft.Network', resource_name_dest='front_door_name'): def _validate_name_or_id(cmd,", "resource_type='WebApplicationFirewallPolicy' )(cmd, namespace) def validate_keyvault(cmd, namespace): get_name_or_id_validator( dest='vault', resource_type='vaults', resource_namespace='Microsoft.Keyvault'", "else None } if not is_valid_resource_id(val): val = resource_id(**id_params) ids.append(val)", "azext_front_door.vendored_sdks.models import MatchCondition if not isinstance(values, list): values = values.split('", "resource_group = namespace.resource_group_name names_or_ids = getattr(namespace, dest) is_list = True", "namespace): from azure.cli.core.commands.client_factory import get_subscription_id from msrestazure.tools import is_valid_resource_id, resource_id", "is_list = True # treat single values as a list,", "namespace) def validate_keyvault(cmd, namespace): get_name_or_id_validator( dest='vault', resource_type='vaults', resource_namespace='Microsoft.Keyvault' )(cmd, namespace)", "'resource_group': resource_group, 'namespace': resource_namespace, 'type': resource_type, 'name': getattr(namespace, resource_name_dest) if", "# pylint: disable=no-self-use def parse_match_condition(self, values): from azext_front_door.vendored_sdks.models import MatchCondition", "child_type else val, 'child_type_1': child_type, 'child_name_1': val if child_type else", 
"See License.txt in the project root for license information. #", "names_or_ids: id_params = { 'subscription': subscription_id, 'resource_group': resource_group, 'namespace': resource_namespace,", "project root for license information. # -------------------------------------------------------------------------------------------- import argparse def", "'frontendEndpoints')(cmd, namespace) def validate_backend_pool(cmd, namespace): get_name_or_id_validator('backend_pool', 'backendPools')(cmd, namespace) def validate_rules_engine(cmd,", "namespace.resource_group_name names_or_ids = getattr(namespace, dest) is_list = True # treat", "not is_valid_resource_id(val): val = resource_id(**id_params) ids.append(val) setattr(namespace, dest, ids if", "not names_or_ids: return ids = [] for val in names_or_ids:", "getattr(namespace, dest) is_list = True # treat single values as", "as a list, but convert back in the end if", "get_name_or_id_validator('load_balancing_settings', 'loadBalancingSettings')(cmd, namespace) def validate_probe_settings(cmd, namespace): get_name_or_id_validator('probe_settings', 'healthProbeSettings')(cmd, namespace) def", "pylint: disable=protected-access class MatchConditionAction(argparse._AppendAction): # pylint: disable=no-self-use def parse_match_condition(self, values):", "'backendPools')(cmd, namespace) def validate_rules_engine(cmd, namespace): get_name_or_id_validator('rules_engine', 'rulesEngines')(cmd, namespace) # pylint:", "= namespace.resource_group_name names_or_ids = getattr(namespace, dest) is_list = True #", "[None] or not names_or_ids: return ids = [] for val", "the project root for license information. 
# -------------------------------------------------------------------------------------------- import argparse", "validate_rules_engine(cmd, namespace): get_name_or_id_validator('rules_engine', 'rulesEngines')(cmd, namespace) # pylint: disable=protected-access class MatchConditionAction(argparse._AppendAction):", "disable=no-self-use def parse_match_condition(self, values): from azext_front_door.vendored_sdks.models import MatchCondition if not", "if child_type else None } if not is_valid_resource_id(val): val =", "dest, ids if is_list else ids[0]) return _validate_name_or_id def validate_waf_policy(cmd,", "list): is_list = False names_or_ids = [names_or_ids] if names_or_ids ==", "license information. # -------------------------------------------------------------------------------------------- import argparse def get_name_or_id_validator(dest, child_type=None, resource_type='Frontdoors',", "error: --match-condition VARIABLE OPERATOR [VALUE [VALUE ...]]') def __call__(self, parser,", "get_name_or_id_validator('backend_pool', 'backendPools')(cmd, namespace) def validate_rules_engine(cmd, namespace): get_name_or_id_validator('rules_engine', 'rulesEngines')(cmd, namespace) #", "'loadBalancingSettings')(cmd, namespace) def validate_probe_settings(cmd, namespace): get_name_or_id_validator('probe_settings', 'healthProbeSettings')(cmd, namespace) def validate_frontend_endpoints(cmd,", "def validate_frontend_endpoints(cmd, namespace): get_name_or_id_validator('frontend_endpoints', 'frontendEndpoints')(cmd, namespace) def validate_backend_pool(cmd, namespace): get_name_or_id_validator('backend_pool',", "...]]') def __call__(self, parser, namespace, values, option_string=None): match_condition = self.parse_match_condition(values)", "namespace) def validate_frontend_endpoints(cmd, namespace): get_name_or_id_validator('frontend_endpoints', 'frontendEndpoints')(cmd, namespace) def validate_backend_pool(cmd, namespace):", "values as a list, but convert back in the end", "# 
Copyright (c) Microsoft Corporation. All rights reserved. # Licensed", "resource_type='Frontdoors', resource_namespace='Microsoft.Network', resource_name_dest='front_door_name'): def _validate_name_or_id(cmd, namespace): from azure.cli.core.commands.client_factory import get_subscription_id", "_validate_name_or_id def validate_waf_policy(cmd, namespace): get_name_or_id_validator( dest='waf_policy', resource_type='WebApplicationFirewallPolicy' )(cmd, namespace) def", ")(cmd, namespace) def validate_keyvault(cmd, namespace): get_name_or_id_validator( dest='vault', resource_type='vaults', resource_namespace='Microsoft.Keyvault' )(cmd,", "from knack.util import CLIError raise CLIError('usage error: --match-condition VARIABLE OPERATOR", "CLIError raise CLIError('usage error: --match-condition VARIABLE OPERATOR [VALUE [VALUE ...]]')", "= False names_or_ids = [names_or_ids] if names_or_ids == [None] or", "get_subscription_id(cmd.cli_ctx) resource_group = namespace.resource_group_name names_or_ids = getattr(namespace, dest) is_list =", "if names_or_ids == [None] or not names_or_ids: return ids =", "return _validate_name_or_id def validate_waf_policy(cmd, namespace): get_name_or_id_validator( dest='waf_policy', resource_type='WebApplicationFirewallPolicy' )(cmd, namespace)", "namespace): get_name_or_id_validator('rules_engine', 'rulesEngines')(cmd, namespace) # pylint: disable=protected-access class MatchConditionAction(argparse._AppendAction): #", "raise CLIError('usage error: --match-condition VARIABLE OPERATOR [VALUE [VALUE ...]]') def", "'subscription': subscription_id, 'resource_group': resource_group, 'namespace': resource_namespace, 'type': resource_type, 'name': getattr(namespace,", "MIT License. See License.txt in the project root for license", "for license information. 
# -------------------------------------------------------------------------------------------- import argparse def get_name_or_id_validator(dest, child_type=None,", "_validate_name_or_id(cmd, namespace): from azure.cli.core.commands.client_factory import get_subscription_id from msrestazure.tools import is_valid_resource_id,", "resource_id subscription_id = get_subscription_id(cmd.cli_ctx) resource_group = namespace.resource_group_name names_or_ids = getattr(namespace,", "[VALUE ...]]') def __call__(self, parser, namespace, values, option_string=None): match_condition =", "= { 'subscription': subscription_id, 'resource_group': resource_group, 'namespace': resource_namespace, 'type': resource_type,", "validate_frontend_endpoints(cmd, namespace): get_name_or_id_validator('frontend_endpoints', 'frontendEndpoints')(cmd, namespace) def validate_backend_pool(cmd, namespace): get_name_or_id_validator('backend_pool', 'backendPools')(cmd,", "def validate_rules_engine(cmd, namespace): get_name_or_id_validator('rules_engine', 'rulesEngines')(cmd, namespace) # pylint: disable=protected-access class", "reserved. # Licensed under the MIT License. 
See License.txt in", "resource_group, 'namespace': resource_namespace, 'type': resource_type, 'name': getattr(namespace, resource_name_dest) if child_type", "setattr(namespace, dest, ids if is_list else ids[0]) return _validate_name_or_id def", "validate_load_balancing_settings(cmd, namespace): get_name_or_id_validator('load_balancing_settings', 'loadBalancingSettings')(cmd, namespace) def validate_probe_settings(cmd, namespace): get_name_or_id_validator('probe_settings', 'healthProbeSettings')(cmd,", "MatchCondition if not isinstance(values, list): values = values.split(' ') try:", "ids if is_list else ids[0]) return _validate_name_or_id def validate_waf_policy(cmd, namespace):", "subscription_id = get_subscription_id(cmd.cli_ctx) resource_group = namespace.resource_group_name names_or_ids = getattr(namespace, dest)", "resource_namespace, 'type': resource_type, 'name': getattr(namespace, resource_name_dest) if child_type else val,", "if not isinstance(values, list): values = values.split(' ') try: return", "information. # -------------------------------------------------------------------------------------------- import argparse def get_name_or_id_validator(dest, child_type=None, resource_type='Frontdoors', resource_namespace='Microsoft.Network',", "is_valid_resource_id(val): val = resource_id(**id_params) ids.append(val) setattr(namespace, dest, ids if is_list", "val, 'child_type_1': child_type, 'child_name_1': val if child_type else None }", "namespace): get_name_or_id_validator('backend_pool', 'backendPools')(cmd, namespace) def validate_rules_engine(cmd, namespace): get_name_or_id_validator('rules_engine', 'rulesEngines')(cmd, namespace)", "# treat single values as a list, but convert back", "values, option_string=None): match_condition = self.parse_match_condition(values) super(MatchConditionAction, self).__call__(parser, namespace, match_condition, option_string)", "Corporation. All rights reserved. 
# Licensed under the MIT License.", "# Licensed under the MIT License. See License.txt in the", "get_subscription_id from msrestazure.tools import is_valid_resource_id, resource_id subscription_id = get_subscription_id(cmd.cli_ctx) resource_group", "names_or_ids = getattr(namespace, dest) is_list = True # treat single", "msrestazure.tools import is_valid_resource_id, resource_id subscription_id = get_subscription_id(cmd.cli_ctx) resource_group = namespace.resource_group_name", "-------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. #", "= True # treat single values as a list, but", "child_type=None, resource_type='Frontdoors', resource_namespace='Microsoft.Network', resource_name_dest='front_door_name'): def _validate_name_or_id(cmd, namespace): from azure.cli.core.commands.client_factory import", "not isinstance(names_or_ids, list): is_list = False names_or_ids = [names_or_ids] if", "if is_list else ids[0]) return _validate_name_or_id def validate_waf_policy(cmd, namespace): get_name_or_id_validator(", "def validate_backend_pool(cmd, namespace): get_name_or_id_validator('backend_pool', 'backendPools')(cmd, namespace) def validate_rules_engine(cmd, namespace): get_name_or_id_validator('rules_engine',", "try: return MatchCondition( match_variable=values[0], operator=values[1], match_value=values[2:] ) except IndexError: from", "get_name_or_id_validator(dest, child_type=None, resource_type='Frontdoors', resource_namespace='Microsoft.Network', resource_name_dest='front_door_name'): def _validate_name_or_id(cmd, namespace): from azure.cli.core.commands.client_factory", "if child_type else val, 'child_type_1': child_type, 'child_name_1': val if child_type", "names_or_ids: return ids = [] for val in names_or_ids: id_params", "names_or_ids = [names_or_ids] if names_or_ids == [None] or not names_or_ids:", "from azext_front_door.vendored_sdks.models import MatchCondition if not 
isinstance(values, list): values =", "License.txt in the project root for license information. # --------------------------------------------------------------------------------------------", "OPERATOR [VALUE [VALUE ...]]') def __call__(self, parser, namespace, values, option_string=None):", "= get_subscription_id(cmd.cli_ctx) resource_group = namespace.resource_group_name names_or_ids = getattr(namespace, dest) is_list", "getattr(namespace, resource_name_dest) if child_type else val, 'child_type_1': child_type, 'child_name_1': val", "= values.split(' ') try: return MatchCondition( match_variable=values[0], operator=values[1], match_value=values[2:] )", "azure.cli.core.commands.client_factory import get_subscription_id from msrestazure.tools import is_valid_resource_id, resource_id subscription_id =", "if not isinstance(names_or_ids, list): is_list = False names_or_ids = [names_or_ids]", "subscription_id, 'resource_group': resource_group, 'namespace': resource_namespace, 'type': resource_type, 'name': getattr(namespace, resource_name_dest)", "') try: return MatchCondition( match_variable=values[0], operator=values[1], match_value=values[2:] ) except IndexError:", "single values as a list, but convert back in the", "License. See License.txt in the project root for license information.", "'child_name_1': val if child_type else None } if not is_valid_resource_id(val):", "# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. 
All rights reserved.", "parser, namespace, values, option_string=None): match_condition = self.parse_match_condition(values) super(MatchConditionAction, self).__call__(parser, namespace,", "names_or_ids == [None] or not names_or_ids: return ids = []", "validate_backend_pool(cmd, namespace): get_name_or_id_validator('backend_pool', 'backendPools')(cmd, namespace) def validate_rules_engine(cmd, namespace): get_name_or_id_validator('rules_engine', 'rulesEngines')(cmd,", "disable=protected-access class MatchConditionAction(argparse._AppendAction): # pylint: disable=no-self-use def parse_match_condition(self, values): from", "dest='waf_policy', resource_type='WebApplicationFirewallPolicy' )(cmd, namespace) def validate_keyvault(cmd, namespace): get_name_or_id_validator( dest='vault', resource_type='vaults',", "namespace) # pylint: disable=protected-access class MatchConditionAction(argparse._AppendAction): # pylint: disable=no-self-use def", "get_name_or_id_validator( dest='waf_policy', resource_type='WebApplicationFirewallPolicy' )(cmd, namespace) def validate_keyvault(cmd, namespace): get_name_or_id_validator( dest='vault',", "False names_or_ids = [names_or_ids] if names_or_ids == [None] or not", "get_name_or_id_validator('frontend_endpoints', 'frontendEndpoints')(cmd, namespace) def validate_backend_pool(cmd, namespace): get_name_or_id_validator('backend_pool', 'backendPools')(cmd, namespace) def", "namespace): get_name_or_id_validator('probe_settings', 'healthProbeSettings')(cmd, namespace) def validate_frontend_endpoints(cmd, namespace): get_name_or_id_validator('frontend_endpoints', 'frontendEndpoints')(cmd, namespace)", "val = resource_id(**id_params) ids.append(val) setattr(namespace, dest, ids if is_list else", "def validate_waf_policy(cmd, namespace): get_name_or_id_validator( dest='waf_policy', resource_type='WebApplicationFirewallPolicy' )(cmd, namespace) def validate_keyvault(cmd,", "match_variable=values[0], operator=values[1], match_value=values[2:] ) 
except IndexError: from knack.util import CLIError", "get_name_or_id_validator('probe_settings', 'healthProbeSettings')(cmd, namespace) def validate_frontend_endpoints(cmd, namespace): get_name_or_id_validator('frontend_endpoints', 'frontendEndpoints')(cmd, namespace) def", "if not is_valid_resource_id(val): val = resource_id(**id_params) ids.append(val) setattr(namespace, dest, ids", "values): from azext_front_door.vendored_sdks.models import MatchCondition if not isinstance(values, list): values", "from azure.cli.core.commands.client_factory import get_subscription_id from msrestazure.tools import is_valid_resource_id, resource_id subscription_id", "resource_id(**id_params) ids.append(val) setattr(namespace, dest, ids if is_list else ids[0]) return", "namespace) def validate_rules_engine(cmd, namespace): get_name_or_id_validator('rules_engine', 'rulesEngines')(cmd, namespace) # pylint: disable=protected-access", "knack.util import CLIError raise CLIError('usage error: --match-condition VARIABLE OPERATOR [VALUE", "None } if not is_valid_resource_id(val): val = resource_id(**id_params) ids.append(val) setattr(namespace,", "MatchConditionAction(argparse._AppendAction): # pylint: disable=no-self-use def parse_match_condition(self, values): from azext_front_door.vendored_sdks.models import", "namespace) def validate_load_balancing_settings(cmd, namespace): get_name_or_id_validator('load_balancing_settings', 'loadBalancingSettings')(cmd, namespace) def validate_probe_settings(cmd, namespace):", "isinstance(values, list): values = values.split(' ') try: return MatchCondition( match_variable=values[0],", "but convert back in the end if not isinstance(names_or_ids, list):", "val in names_or_ids: id_params = { 'subscription': subscription_id, 'resource_group': resource_group,", "resource_name_dest='front_door_name'): def _validate_name_or_id(cmd, namespace): from azure.cli.core.commands.client_factory import get_subscription_id from msrestazure.tools", "dest) is_list = 
True # treat single values as a", "namespace, values, option_string=None): match_condition = self.parse_match_condition(values) super(MatchConditionAction, self).__call__(parser, namespace, match_condition,", "is_list else ids[0]) return _validate_name_or_id def validate_waf_policy(cmd, namespace): get_name_or_id_validator( dest='waf_policy',", "under the MIT License. See License.txt in the project root", "or not names_or_ids: return ids = [] for val in", "list): values = values.split(' ') try: return MatchCondition( match_variable=values[0], operator=values[1],", "resource_namespace='Microsoft.Network', resource_name_dest='front_door_name'): def _validate_name_or_id(cmd, namespace): from azure.cli.core.commands.client_factory import get_subscription_id from", "def parse_match_condition(self, values): from azext_front_door.vendored_sdks.models import MatchCondition if not isinstance(values,", "(c) Microsoft Corporation. All rights reserved. # Licensed under the", "All rights reserved. # Licensed under the MIT License. 
See", "except IndexError: from knack.util import CLIError raise CLIError('usage error: --match-condition", "True # treat single values as a list, but convert", "return MatchCondition( match_variable=values[0], operator=values[1], match_value=values[2:] ) except IndexError: from knack.util", "class MatchConditionAction(argparse._AppendAction): # pylint: disable=no-self-use def parse_match_condition(self, values): from azext_front_door.vendored_sdks.models", "def __call__(self, parser, namespace, values, option_string=None): match_condition = self.parse_match_condition(values) super(MatchConditionAction,", "parse_match_condition(self, values): from azext_front_door.vendored_sdks.models import MatchCondition if not isinstance(values, list):", ")(cmd, namespace) def validate_load_balancing_settings(cmd, namespace): get_name_or_id_validator('load_balancing_settings', 'loadBalancingSettings')(cmd, namespace) def validate_probe_settings(cmd,", "def validate_load_balancing_settings(cmd, namespace): get_name_or_id_validator('load_balancing_settings', 'loadBalancingSettings')(cmd, namespace) def validate_probe_settings(cmd, namespace): get_name_or_id_validator('probe_settings',", "dest='vault', resource_type='vaults', resource_namespace='Microsoft.Keyvault' )(cmd, namespace) def validate_load_balancing_settings(cmd, namespace): get_name_or_id_validator('load_balancing_settings', 'loadBalancingSettings')(cmd,", "match_value=values[2:] ) except IndexError: from knack.util import CLIError raise CLIError('usage", "'type': resource_type, 'name': getattr(namespace, resource_name_dest) if child_type else val, 'child_type_1':", "== [None] or not names_or_ids: return ids = [] for", "} if not is_valid_resource_id(val): val = resource_id(**id_params) ids.append(val) setattr(namespace, dest,", "child_type, 'child_name_1': val if child_type else None } if not", "values.split(' ') try: return MatchCondition( match_variable=values[0], operator=values[1], match_value=values[2:] ) except", 
"CLIError('usage error: --match-condition VARIABLE OPERATOR [VALUE [VALUE ...]]') def __call__(self,", "val if child_type else None } if not is_valid_resource_id(val): val", "get_name_or_id_validator( dest='vault', resource_type='vaults', resource_namespace='Microsoft.Keyvault' )(cmd, namespace) def validate_load_balancing_settings(cmd, namespace): get_name_or_id_validator('load_balancing_settings',", "not isinstance(values, list): values = values.split(' ') try: return MatchCondition(", "[] for val in names_or_ids: id_params = { 'subscription': subscription_id,", "{ 'subscription': subscription_id, 'resource_group': resource_group, 'namespace': resource_namespace, 'type': resource_type, 'name':", "ids.append(val) setattr(namespace, dest, ids if is_list else ids[0]) return _validate_name_or_id", "values = values.split(' ') try: return MatchCondition( match_variable=values[0], operator=values[1], match_value=values[2:]", "__call__(self, parser, namespace, values, option_string=None): match_condition = self.parse_match_condition(values) super(MatchConditionAction, self).__call__(parser,", "def get_name_or_id_validator(dest, child_type=None, resource_type='Frontdoors', resource_namespace='Microsoft.Network', resource_name_dest='front_door_name'): def _validate_name_or_id(cmd, namespace): from", "Microsoft Corporation. All rights reserved. # Licensed under the MIT", "Licensed under the MIT License. 
See License.txt in the project", "argparse def get_name_or_id_validator(dest, child_type=None, resource_type='Frontdoors', resource_namespace='Microsoft.Network', resource_name_dest='front_door_name'): def _validate_name_or_id(cmd, namespace):", "validate_keyvault(cmd, namespace): get_name_or_id_validator( dest='vault', resource_type='vaults', resource_namespace='Microsoft.Keyvault' )(cmd, namespace) def validate_load_balancing_settings(cmd,", "MatchCondition( match_variable=values[0], operator=values[1], match_value=values[2:] ) except IndexError: from knack.util import", "ids[0]) return _validate_name_or_id def validate_waf_policy(cmd, namespace): get_name_or_id_validator( dest='waf_policy', resource_type='WebApplicationFirewallPolicy' )(cmd,", "is_valid_resource_id, resource_id subscription_id = get_subscription_id(cmd.cli_ctx) resource_group = namespace.resource_group_name names_or_ids =", "[names_or_ids] if names_or_ids == [None] or not names_or_ids: return ids", "'name': getattr(namespace, resource_name_dest) if child_type else val, 'child_type_1': child_type, 'child_name_1':", "import MatchCondition if not isinstance(values, list): values = values.split(' ')", "IndexError: from knack.util import CLIError raise CLIError('usage error: --match-condition VARIABLE", "list, but convert back in the end if not isinstance(names_or_ids,", "resource_name_dest) if child_type else val, 'child_type_1': child_type, 'child_name_1': val if", "convert back in the end if not isinstance(names_or_ids, list): is_list", "rights reserved. # Licensed under the MIT License. See License.txt", "resource_type, 'name': getattr(namespace, resource_name_dest) if child_type else val, 'child_type_1': child_type,", "in the project root for license information. 
# -------------------------------------------------------------------------------------------- import", "= resource_id(**id_params) ids.append(val) setattr(namespace, dest, ids if is_list else ids[0])", "import get_subscription_id from msrestazure.tools import is_valid_resource_id, resource_id subscription_id = get_subscription_id(cmd.cli_ctx)", ") except IndexError: from knack.util import CLIError raise CLIError('usage error:", "VARIABLE OPERATOR [VALUE [VALUE ...]]') def __call__(self, parser, namespace, values,", "get_name_or_id_validator('rules_engine', 'rulesEngines')(cmd, namespace) # pylint: disable=protected-access class MatchConditionAction(argparse._AppendAction): # pylint:", "operator=values[1], match_value=values[2:] ) except IndexError: from knack.util import CLIError raise", "-------------------------------------------------------------------------------------------- import argparse def get_name_or_id_validator(dest, child_type=None, resource_type='Frontdoors', resource_namespace='Microsoft.Network', resource_name_dest='front_door_name'): def", "in the end if not isinstance(names_or_ids, list): is_list = False", "resource_namespace='Microsoft.Keyvault' )(cmd, namespace) def validate_load_balancing_settings(cmd, namespace): get_name_or_id_validator('load_balancing_settings', 'loadBalancingSettings')(cmd, namespace) def", "ids = [] for val in names_or_ids: id_params = {", "<reponame>Mannan2812/azure-cli-extensions<gh_stars>100-1000 # -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. 
All rights", "a list, but convert back in the end if not", "namespace): get_name_or_id_validator( dest='vault', resource_type='vaults', resource_namespace='Microsoft.Keyvault' )(cmd, namespace) def validate_load_balancing_settings(cmd, namespace):", "end if not isinstance(names_or_ids, list): is_list = False names_or_ids =", "'rulesEngines')(cmd, namespace) # pylint: disable=protected-access class MatchConditionAction(argparse._AppendAction): # pylint: disable=no-self-use", "resource_type='vaults', resource_namespace='Microsoft.Keyvault' )(cmd, namespace) def validate_load_balancing_settings(cmd, namespace): get_name_or_id_validator('load_balancing_settings', 'loadBalancingSettings')(cmd, namespace)", "namespace): get_name_or_id_validator( dest='waf_policy', resource_type='WebApplicationFirewallPolicy' )(cmd, namespace) def validate_keyvault(cmd, namespace): get_name_or_id_validator(", "namespace) def validate_probe_settings(cmd, namespace): get_name_or_id_validator('probe_settings', 'healthProbeSettings')(cmd, namespace) def validate_frontend_endpoints(cmd, namespace):", "# -------------------------------------------------------------------------------------------- import argparse def get_name_or_id_validator(dest, child_type=None, resource_type='Frontdoors', resource_namespace='Microsoft.Network', resource_name_dest='front_door_name'):", "else ids[0]) return _validate_name_or_id def validate_waf_policy(cmd, namespace): get_name_or_id_validator( dest='waf_policy', resource_type='WebApplicationFirewallPolicy'", "= [] for val in names_or_ids: id_params = { 'subscription':", "def validate_probe_settings(cmd, namespace): get_name_or_id_validator('probe_settings', 'healthProbeSettings')(cmd, namespace) def validate_frontend_endpoints(cmd, namespace): get_name_or_id_validator('frontend_endpoints',", "else val, 'child_type_1': child_type, 'child_name_1': val if child_type else None", "back in the end if not isinstance(names_or_ids, list): is_list =", "= [names_or_ids] if 
names_or_ids == [None] or not names_or_ids: return", "Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under", "def validate_keyvault(cmd, namespace): get_name_or_id_validator( dest='vault', resource_type='vaults', resource_namespace='Microsoft.Keyvault' )(cmd, namespace) def" ]
[ "\"stoic\", \"suave\", \"sycophant\", \"tirade\", \"tryst\", \"untenable\", \"vicarious\", \"vile\", \"waft\", \"zealous\",", "\"irritator\", \"isisaurus\", \"janenschia\", \"jaxartosaurus\", \"jingshanosaurus\", \"jinzhousaurus\", \"jobaria\", \"juravenator\", \"kentrosaurus\", \"khaan\",", "\"anserimimus\", \"antarctopelta\", \"antarctosaurus\", \"apatosaurus\", \"aragosaurus\", \"aralosaurus\", \"archaeoceratops\", \"archaeopteryx\", \"archaeornithomimus\", \"argentinosaurus\",", "\"hedonist\", \"heresy\", \"idyllic\", \"insidious\", \"junket\", \"kitsch\", \"litany\", \"lurid\", \"malaise\", \"malinger\",", "\"sinvenator\", \"zalmoxes\", \"zephyrosaurus\", \"zuniceratops\", \"byzantine\", \"svengali\", \"accolade\", \"acrimony\", \"angst\", \"anomaly\",", "[ \"Development\", \"Downloads\", \"Documents\", \"Music\", \"Video\", \"Work\", \"Pictures\", \"Desktop\", \"Study\",", "BSD 3-Clause License\", \"The BSD 2-Clause License\", \"GNU General Public", "(GPL)\", \"General Public License (LGPL)\", \"MIT License (MIT)\", \"Mozilla Public", "\"Windows 10\", \"Windows 7\", \"Windows 8\", \"Windows 8.1\", \"Zorin\", \"elementaryOS\",", "\"Julia\", \"Kotlin\", \"Lisp\", \"Lua\", \"Mathematica\", \"Objective-C\", \"OCaml\", \"Perl\", \"PHP\", \"PL-I\",", "\"PHP\", \"PL-I\", \"PL-SQL\", \"PowerShell\", \"Prolog\", \"Python\", \"R\", \"Racket\", \"Ruby\", \"Rust\",", "\"bagaceratops\", \"bambiraptor\", \"barapasaurus\", \"barosaurus\", \"baryonyx\", \"becklespinax\", \"beipiaosaurus\", \"bellusaurus\", \"borogovia\", \"brachiosaurus\",", "\"giganotosaurus\", \"gilmoreosaurus\", \"giraffatitan\", \"gobisaurus\", \"gorgosaurus\", \"goyocephale\", \"graciliceratops\", \"gryposaurus\", \"guaibasaurus\", \"guanlong\",", "\"lapparentosaurus\", \"leaellynasaura\", \"leptoceratops\", \"lesothosaurus\", \"lexovisaurus\", \"liaoceratops\", \"liaoxiornis\", \"ligabuesaurus\", \"liliensternus\", \"lophorhothon\",", "\"rebbachisaurus\", \"rhabdodon\", \"rhoetosaurus\", 
\"rinchenia\", \"riojasaurus\", \"rugops\", \"saichania\", \"saltasaurus\", \"saltopus\", \"sarcosaurus\",", "to the development.\"\"\" LICENSES = [ \"Apache License, 2.0 (Apache-2.0)\",", "\"chindesaurus\", \"chinshakiangosaurus\", \"chirostenotes\", \"chubutisaurus\", \"chungkingosaurus\", \"citipati\", \"coelophysis\", \"coelurus\", \"coloradisaurus\", \"compsognathus\",", "\"deinocheirus\", \"deinonychus\", \"deltadromeus\", \"diceratops\", \"dicraeosaurus\", \"dilophosaurus\", \"diplodocus\", \"dracorex\", \"dravidosaurus\", \"dromaeosaurus\",", "\"macOS\", \"OpenBSD\", \"PCLinuxOS\", \"Slackware\", \"Ubuntu\", \"Windows 10\", \"Windows 7\", \"Windows", "\"mercenary\", \"misnomer\", \"nirvana\", \"oblivion\", \"ogle\", \"ostracize\", \"panacea\", \"paradox\", \"peevish\", \"propriety\",", "\"Desktop\", \"Study\", ] PROJECT_NAMES = [ \"aardonyx\", \"abelisaurus\", \"achelousaurus\", \"achillobator\",", "\"Dart\", \"Delphi\", \"Dylan\", \"ECMAScript\", \"Elixir\", \"Emacs Lisp\", \"Erlang\", \"F#\", \"Falcon\",", "\"finagle\", \"glib\", \"harbinger\", \"hedonist\", \"heresy\", \"idyllic\", \"insidious\", \"junket\", \"kitsch\", \"litany\",", "\"guanlong\", \"hadrosaurus\", \"hagryphus\", \"haplocanthosaurus\", \"harpymimus\", \"herrerasaurus\", \"hesperosaurus\", \"heterodontosaurus\", \"homalocephale\", \"huayangosaurus\",", "\"brachiosaurus\", \"brachyceratops\", \"bugenasaura\", \"buitreraptor\", \"camarasaurus\", \"camptosaurus\", \"carnotaurus\", \"caudipteryx\", \"cedarpelta\", \"centrosaurus\",", "\"liaoxiornis\", \"ligabuesaurus\", \"liliensternus\", \"lophorhothon\", \"lophostropheus\", \"lufengosaurus\", \"lurdusaurus\", \"lycorhinus\", \"magyarosaurus\", \"maiasaura\",", "\"The BSD 3-Clause License\", \"The BSD 2-Clause License\", \"GNU General", "\"Documents\", \"Music\", \"Video\", \"Work\", \"Pictures\", \"Desktop\", \"Study\", ] PROJECT_NAMES =", "\"liliensternus\", \"lophorhothon\", \"lophostropheus\", \"lufengosaurus\", \"lurdusaurus\", 
\"lycorhinus\", \"magyarosaurus\", \"maiasaura\", \"majungatholus\", \"malawisaurus\",", "\"AutoIt\", \"Awk\", \"Bash\", \"C\", \"C Shell\", \"C#\", \"C++\", \"Caml\", \"Ceylon\",", "\"antarctosaurus\", \"apatosaurus\", \"aragosaurus\", \"aralosaurus\", \"archaeoceratops\", \"archaeopteryx\", \"archaeornithomimus\", \"argentinosaurus\", \"arrhinoceratops\", \"atlascopcosaurus\",", "\"peevish\", \"propriety\", \"revel\", \"rhetoric\", \"spartan\", \"stigma\", \"stoic\", \"suave\", \"sycophant\", \"tirade\",", "\"allosaurus\", \"alvarezsaurus\", \"amargasaurus\", \"ammosaurus\", \"ampelosaurus\", \"amygdalodon\", \"anatotitan\", \"anchiceratops\", \"anchisaurus\", \"ankylosaurus\",", "(EPL-1.0)\", ] PROGRAMMING_LANGS = [ \"ASP\", \"Assembly\", \"AutoIt\", \"Awk\", \"Bash\",", "\"othnielia\", \"ouranosaurus\", \"oviraptor\", \"rebbachisaurus\", \"rhabdodon\", \"rhoetosaurus\", \"rinchenia\", \"riojasaurus\", \"rugops\", \"saichania\",", "\"ankylosaurus\", \"anserimimus\", \"antarctopelta\", \"antarctosaurus\", \"apatosaurus\", \"aragosaurus\", \"aralosaurus\", \"archaeoceratops\", \"archaeopteryx\", \"archaeornithomimus\",", "\"dryosaurus\", \"dryptosaurus\", \"dubreuillosaurus\", \"edmontonia\", \"edmontosaurus\", \"einiosaurus\", \"elaphrosaurus\", \"emausaurus\", \"eolambia\", \"eoraptor\",", "\"acrocanthosaurus\", \"aegyptosaurus\", \"afrovenator\", \"agilisaurus\", \"alamosaurus\", \"albertaceratops\", \"albertosaurus\", \"alectrosaurus\", \"alioramus\", \"allosaurus\",", "\"magyarosaurus\", \"maiasaura\", \"majungatholus\", \"malawisaurus\", \"mamenchisaurus\", \"mapusaurus\", \"marshosaurus\", \"masiakasaurus\", \"massospondylus\", \"maxakalisaurus\",", "\"chasmosaurus\", \"chialingosaurus\", \"chindesaurus\", \"chinshakiangosaurus\", \"chirostenotes\", \"chubutisaurus\", \"chungkingosaurus\", \"citipati\", \"coelophysis\", \"coelurus\",", "\"saurolophus\", \"sauropelta\", \"saurophaganax\", \"saurornithoides\", \"scelidosaurus\", \"scutellosaurus\", 
\"secernosaurus\", \"segisaurus\", \"segnosaurus\", \"seismosaurus\",", "\"panacea\", \"paradox\", \"peevish\", \"propriety\", \"revel\", \"rhetoric\", \"spartan\", \"stigma\", \"stoic\", \"suave\",", "\"borogovia\", \"brachiosaurus\", \"brachyceratops\", \"bugenasaura\", \"buitreraptor\", \"camarasaurus\", \"camptosaurus\", \"carnotaurus\", \"caudipteryx\", \"cedarpelta\",", "\"Pictures\", \"Desktop\", \"Study\", ] PROJECT_NAMES = [ \"aardonyx\", \"abelisaurus\", \"achelousaurus\",", "\"gasparinisaura\", \"gastonia\", \"giganotosaurus\", \"gilmoreosaurus\", \"giraffatitan\", \"gobisaurus\", \"gorgosaurus\", \"goyocephale\", \"graciliceratops\", \"gryposaurus\",", "\"Ruby\", \"Rust\", \"Scala\", \"Scheme\", \"Smalltalk\", \"Tcl\", \"Tex\", \"Transact-SQL\", \"TypeScript\", \"Z", "\"omeisaurus\", \"ornitholestes\", \"ornithomimus\", \"orodromeus\", \"oryctodromeus\", \"othnielia\", \"ouranosaurus\", \"oviraptor\", \"rebbachisaurus\", \"rhabdodon\",", "\"nanotyrannus\", \"nanshiungosaurus\", \"nemegtosaurus\", \"neovenator\", \"neuquenosaurus\", \"nigersaurus\", \"nipponosaurus\", \"noasaurus\", \"nodosaurus\", \"nomingia\",", "\"diceratops\", \"dicraeosaurus\", \"dilophosaurus\", \"diplodocus\", \"dracorex\", \"dravidosaurus\", \"dromaeosaurus\", \"dromiceiomimus\", \"dryosaurus\", \"dryptosaurus\",", "\"mussaurus\", \"muttaburrasaurus\", \"nanotyrannus\", \"nanshiungosaurus\", \"nemegtosaurus\", \"neovenator\", \"neuquenosaurus\", \"nigersaurus\", \"nipponosaurus\", \"noasaurus\",", "\"Windows 8\", \"Windows 8.1\", \"Zorin\", \"elementaryOS\", \"macOS\", \"openSUSE\", ] FOLDERS", "\"bambiraptor\", \"barapasaurus\", \"barosaurus\", \"baryonyx\", \"becklespinax\", \"beipiaosaurus\", \"bellusaurus\", \"borogovia\", \"brachiosaurus\", \"brachyceratops\",", "\"Caml\", \"Ceylon\", \"Clojure\", \"CoffeeScript\", \"Common Lisp\", \"D\", \"Dart\", \"Delphi\", \"Dylan\",", "\"maxakalisaurus\", \"megalosaurus\", \"melanorosaurus\", \"metriacanthosaurus\", \"microceratops\", 
\"micropachycephalosaurus\", \"microraptor\", \"minmi\", \"monolophosaurus\", \"mononykus\",", "\"brogue\", \"brusque\", \"cacophony\", \"caustic\", \"charisma\", \"cloying\", \"deja-vu\", \"dichotomy\", \"elan\", \"ennui\",", "Public License (EPL-1.0)\", ] PROGRAMMING_LANGS = [ \"ASP\", \"Assembly\", \"AutoIt\",", "\"oryctodromeus\", \"othnielia\", \"ouranosaurus\", \"oviraptor\", \"rebbachisaurus\", \"rhabdodon\", \"rhoetosaurus\", \"rinchenia\", \"riojasaurus\", \"rugops\",", "\"datousaurus\", \"deinocheirus\", \"deinonychus\", \"deltadromeus\", \"diceratops\", \"dicraeosaurus\", \"dilophosaurus\", \"diplodocus\", \"dracorex\", \"dravidosaurus\",", "\"Slackware\", \"Ubuntu\", \"Windows 10\", \"Windows 7\", \"Windows 8\", \"Windows 8.1\",", "\"Prolog\", \"Python\", \"R\", \"Racket\", \"Ruby\", \"Rust\", \"Scala\", \"Scheme\", \"Smalltalk\", \"Tcl\",", "\"Awk\", \"Bash\", \"C\", \"C Shell\", \"C#\", \"C++\", \"Caml\", \"Ceylon\", \"Clojure\",", "\"byzantine\", \"svengali\", \"accolade\", \"acrimony\", \"angst\", \"anomaly\", \"antidote\", \"baroque\", \"bona_fide\", \"bourgeois\",", "\"nanshiungosaurus\", \"nemegtosaurus\", \"neovenator\", \"neuquenosaurus\", \"nigersaurus\", \"nipponosaurus\", \"noasaurus\", \"nodosaurus\", \"nomingia\", \"nothronychus\",", "\"ostracize\", \"panacea\", \"paradox\", \"peevish\", \"propriety\", \"revel\", \"rhetoric\", \"spartan\", \"stigma\", \"stoic\",", "\"neovenator\", \"neuquenosaurus\", \"nigersaurus\", \"nipponosaurus\", \"noasaurus\", \"nodosaurus\", \"nomingia\", \"nothronychus\", \"nqwebasaurus\", \"omeisaurus\",", "\"secernosaurus\", \"segisaurus\", \"segnosaurus\", \"seismosaurus\", \"shamosaurus\", \"shanag\", \"shantungosaurus\", \"shunosaurus\", \"shuvuuia\", \"silvisaurus\",", "\"stigma\", \"stoic\", \"suave\", \"sycophant\", \"tirade\", \"tryst\", \"untenable\", \"vicarious\", \"vile\", \"waft\",", "\"kotasaurus\", \"kritosaurus\", \"lamaceratops\", \"lambeosaurus\", \"lapparentosaurus\", \"leaellynasaura\", 
\"leptoceratops\", \"lesothosaurus\", \"lexovisaurus\", \"liaoceratops\",", "\"jaxartosaurus\", \"jingshanosaurus\", \"jinzhousaurus\", \"jobaria\", \"juravenator\", \"kentrosaurus\", \"khaan\", \"kotasaurus\", \"kritosaurus\", \"lamaceratops\",", "\"Falcon\", \"Fortran\", \"GNU Octave\", \"Go\", \"Groovy\", \"Haskell\", \"haXe\", \"Io\", \"J#\",", "\"becklespinax\", \"beipiaosaurus\", \"bellusaurus\", \"borogovia\", \"brachiosaurus\", \"brachyceratops\", \"bugenasaura\", \"buitreraptor\", \"camarasaurus\", \"camptosaurus\",", "\"lesothosaurus\", \"lexovisaurus\", \"liaoceratops\", \"liaoxiornis\", \"ligabuesaurus\", \"liliensternus\", \"lophorhothon\", \"lophostropheus\", \"lufengosaurus\", \"lurdusaurus\",", "shell\", ] OS = [ \"Arch\", \"CentOS\", \"Debian\", \"Fedora\", \"FreeBSD\",", "\"Bash\", \"C\", \"C Shell\", \"C#\", \"C++\", \"Caml\", \"Ceylon\", \"Clojure\", \"CoffeeScript\",", "pas\", \"fiasco\", \"finagle\", \"glib\", \"harbinger\", \"hedonist\", \"heresy\", \"idyllic\", \"insidious\", \"junket\",", "\"dilophosaurus\", \"diplodocus\", \"dracorex\", \"dravidosaurus\", \"dromaeosaurus\", \"dromiceiomimus\", \"dryosaurus\", \"dryptosaurus\", \"dubreuillosaurus\", \"edmontonia\",", "\"leaellynasaura\", \"leptoceratops\", \"lesothosaurus\", \"lexovisaurus\", \"liaoceratops\", \"liaoxiornis\", \"ligabuesaurus\", \"liliensternus\", \"lophorhothon\", \"lophostropheus\",", "\"albertosaurus\", \"alectrosaurus\", \"alioramus\", \"allosaurus\", \"alvarezsaurus\", \"amargasaurus\", \"ammosaurus\", \"ampelosaurus\", \"amygdalodon\", \"anatotitan\",", "\"nomingia\", \"nothronychus\", \"nqwebasaurus\", \"omeisaurus\", \"ornitholestes\", \"ornithomimus\", \"orodromeus\", \"oryctodromeus\", \"othnielia\", \"ouranosaurus\",", "PROGRAMMING_LANGS = [ \"ASP\", \"Assembly\", \"AutoIt\", \"Awk\", \"Bash\", \"C\", \"C", "\"deinonychus\", \"deltadromeus\", \"diceratops\", \"dicraeosaurus\", \"dilophosaurus\", \"diplodocus\", \"dracorex\", \"dravidosaurus\", \"dromaeosaurus\", 
\"dromiceiomimus\",", "\"antarctopelta\", \"antarctosaurus\", \"apatosaurus\", \"aragosaurus\", \"aralosaurus\", \"archaeoceratops\", \"archaeopteryx\", \"archaeornithomimus\", \"argentinosaurus\", \"arrhinoceratops\",", "\"segnosaurus\", \"seismosaurus\", \"shamosaurus\", \"shanag\", \"shantungosaurus\", \"shunosaurus\", \"shuvuuia\", \"silvisaurus\", \"sinocalliopteryx\", \"sinornithosaurus\",", "\"aegyptosaurus\", \"afrovenator\", \"agilisaurus\", \"alamosaurus\", \"albertaceratops\", \"albertosaurus\", \"alectrosaurus\", \"alioramus\", \"allosaurus\", \"alvarezsaurus\",", "\"Common Lisp\", \"D\", \"Dart\", \"Delphi\", \"Dylan\", \"ECMAScript\", \"Elixir\", \"Emacs Lisp\",", "\"elementaryOS\", \"macOS\", \"openSUSE\", ] FOLDERS = [ \"Development\", \"Downloads\", \"Documents\",", "\"lurdusaurus\", \"lycorhinus\", \"magyarosaurus\", \"maiasaura\", \"majungatholus\", \"malawisaurus\", \"mamenchisaurus\", \"mapusaurus\", \"marshosaurus\", \"masiakasaurus\",", "\"achillobator\", \"acrocanthosaurus\", \"aegyptosaurus\", \"afrovenator\", \"agilisaurus\", \"alamosaurus\", \"albertaceratops\", \"albertosaurus\", \"alectrosaurus\", \"alioramus\",", "\"rhetoric\", \"spartan\", \"stigma\", \"stoic\", \"suave\", \"sycophant\", \"tirade\", \"tryst\", \"untenable\", \"vicarious\",", "\"indosuchus\", \"ingenia\", \"irritator\", \"isisaurus\", \"janenschia\", \"jaxartosaurus\", \"jingshanosaurus\", \"jinzhousaurus\", \"jobaria\", \"juravenator\",", "\"anchisaurus\", \"ankylosaurus\", \"anserimimus\", \"antarctopelta\", \"antarctosaurus\", \"apatosaurus\", \"aragosaurus\", \"aralosaurus\", \"archaeoceratops\", \"archaeopteryx\",", "\"elaphrosaurus\", \"emausaurus\", \"eolambia\", \"eoraptor\", \"eotyrannus\", \"equijubus\", \"erketu\", \"erlikosaurus\", \"euhelopus\", \"euoplocephalus\",", "\"OCaml\", \"Perl\", \"PHP\", \"PL-I\", \"PL-SQL\", \"PowerShell\", \"Prolog\", \"Python\", \"R\", \"Racket\",", "[ \"aardonyx\", \"abelisaurus\", \"achelousaurus\", \"achillobator\", 
\"acrocanthosaurus\", \"aegyptosaurus\", \"afrovenator\", \"agilisaurus\", \"alamosaurus\",", "\"chinshakiangosaurus\", \"chirostenotes\", \"chubutisaurus\", \"chungkingosaurus\", \"citipati\", \"coelophysis\", \"coelurus\", \"coloradisaurus\", \"compsognathus\", \"conchoraptor\",", "\"oviraptor\", \"rebbachisaurus\", \"rhabdodon\", \"rhoetosaurus\", \"rinchenia\", \"riojasaurus\", \"rugops\", \"saichania\", \"saltasaurus\", \"saltopus\",", "\"euoplocephalus\", \"europasaurus\", \"euskelosaurus\", \"eustreptospondylus\", \"fukuiraptor\", \"fukuisaurus\", \"gallimimus\", \"gargoyleosaurus\", \"garudimimus\", \"gasosaurus\",", "\"Eclipse Public License (EPL-1.0)\", ] PROGRAMMING_LANGS = [ \"ASP\", \"Assembly\",", "\"azendohsaurus\", \"bactrosaurus\", \"bagaceratops\", \"bambiraptor\", \"barapasaurus\", \"barosaurus\", \"baryonyx\", \"becklespinax\", \"beipiaosaurus\", \"bellusaurus\",", "\"Gentoo\", \"Kali\", \"Lubuntu\", \"Manjaro\", \"Mint\", \"OS X\", \"macOS\", \"OpenBSD\", \"PCLinuxOS\",", "\"harpymimus\", \"herrerasaurus\", \"hesperosaurus\", \"heterodontosaurus\", \"homalocephale\", \"huayangosaurus\", \"hylaeosaurus\", \"hypacrosaurus\", \"hypselosaurus\", \"hypsilophodon\",", "\"glib\", \"harbinger\", \"hedonist\", \"heresy\", \"idyllic\", \"insidious\", \"junket\", \"kitsch\", \"litany\", \"lurid\",", "\"macOS\", \"openSUSE\", ] FOLDERS = [ \"Development\", \"Downloads\", \"Documents\", \"Music\",", "\"caustic\", \"charisma\", \"cloying\", \"deja-vu\", \"dichotomy\", \"elan\", \"ennui\", \"epitome\", \"esoteric\", \"euphemism\",", "\"cedarpelta\", \"centrosaurus\", \"ceratosaurus\", \"cetiosauriscus\", \"cetiosaurus\", \"chaoyangsaurus\", \"chasmosaurus\", \"chialingosaurus\", \"chindesaurus\", \"chinshakiangosaurus\",", "\"saurornithoides\", \"scelidosaurus\", \"scutellosaurus\", \"secernosaurus\", \"segisaurus\", \"segnosaurus\", \"seismosaurus\", \"shamosaurus\", \"shanag\", \"shantungosaurus\",", "\"Haskell\", \"haXe\", \"Io\", \"J#\", \"Java\", 
\"JavaScript\", \"Julia\", \"Kotlin\", \"Lisp\", \"Lua\",", "\"Apache License, 2.0 (Apache-2.0)\", \"The BSD 3-Clause License\", \"The BSD", "\"alioramus\", \"allosaurus\", \"alvarezsaurus\", \"amargasaurus\", \"ammosaurus\", \"ampelosaurus\", \"amygdalodon\", \"anatotitan\", \"anchiceratops\", \"anchisaurus\",", "\"massospondylus\", \"maxakalisaurus\", \"megalosaurus\", \"melanorosaurus\", \"metriacanthosaurus\", \"microceratops\", \"micropachycephalosaurus\", \"microraptor\", \"minmi\", \"monolophosaurus\",", "License 2.0 (MPL-2.0)\", \"Common Development and Distribution License (CDDL-1.0)\", \"Eclipse", "\"MIT License (MIT)\", \"Mozilla Public License 2.0 (MPL-2.0)\", \"Common Development", "\"Perl\", \"PHP\", \"PL-I\", \"PL-SQL\", \"PowerShell\", \"Prolog\", \"Python\", \"R\", \"Racket\", \"Ruby\",", "General Public License (GPL)\", \"General Public License (LGPL)\", \"MIT License", "\"amargasaurus\", \"ammosaurus\", \"ampelosaurus\", \"amygdalodon\", \"anatotitan\", \"anchiceratops\", \"anchisaurus\", \"ankylosaurus\", \"anserimimus\", \"antarctopelta\",", "\"conchoraptor\", \"confuciusornis\", \"corythosaurus\", \"cryolophosaurus\", \"dacentrurus\", \"daspletosaurus\", \"datousaurus\", \"deinocheirus\", \"deinonychus\", \"deltadromeus\",", "\"Work\", \"Pictures\", \"Desktop\", \"Study\", ] PROJECT_NAMES = [ \"aardonyx\", \"abelisaurus\",", "Public License 2.0 (MPL-2.0)\", \"Common Development and Distribution License (CDDL-1.0)\",", "= [ \"Arch\", \"CentOS\", \"Debian\", \"Fedora\", \"FreeBSD\", \"Gentoo\", \"Kali\", \"Lubuntu\",", "\"Mathematica\", \"Objective-C\", \"OCaml\", \"Perl\", \"PHP\", \"PL-I\", \"PL-SQL\", \"PowerShell\", \"Prolog\", \"Python\",", "\"sarcosaurus\", \"saurolophus\", \"sauropelta\", \"saurophaganax\", \"saurornithoides\", \"scelidosaurus\", \"scutellosaurus\", \"secernosaurus\", \"segisaurus\", \"segnosaurus\",", "\"janenschia\", \"jaxartosaurus\", \"jingshanosaurus\", \"jinzhousaurus\", \"jobaria\", \"juravenator\", 
\"kentrosaurus\", \"khaan\", \"kotasaurus\", \"kritosaurus\",", "\"Ubuntu\", \"Windows 10\", \"Windows 7\", \"Windows 8\", \"Windows 8.1\", \"Zorin\",", "\"brusque\", \"cacophony\", \"caustic\", \"charisma\", \"cloying\", \"deja-vu\", \"dichotomy\", \"elan\", \"ennui\", \"epitome\",", "Distribution License (CDDL-1.0)\", \"Eclipse Public License (EPL-1.0)\", ] PROGRAMMING_LANGS =", "\"Ceylon\", \"Clojure\", \"CoffeeScript\", \"Common Lisp\", \"D\", \"Dart\", \"Delphi\", \"Dylan\", \"ECMAScript\",", "[ \"Apache License, 2.0 (Apache-2.0)\", \"The BSD 3-Clause License\", \"The", "\"Io\", \"J#\", \"Java\", \"JavaScript\", \"Julia\", \"Kotlin\", \"Lisp\", \"Lua\", \"Mathematica\", \"Objective-C\",", "\"muttaburrasaurus\", \"nanotyrannus\", \"nanshiungosaurus\", \"nemegtosaurus\", \"neovenator\", \"neuquenosaurus\", \"nigersaurus\", \"nipponosaurus\", \"noasaurus\", \"nodosaurus\",", "\"TypeScript\", \"Z shell\", ] OS = [ \"Arch\", \"CentOS\", \"Debian\",", "\"malinger\", \"mantra\", \"maudlin\", \"mercenary\", \"misnomer\", \"nirvana\", \"oblivion\", \"ogle\", \"ostracize\", \"panacea\",", "\"majungatholus\", \"malawisaurus\", \"mamenchisaurus\", \"mapusaurus\", \"marshosaurus\", \"masiakasaurus\", \"massospondylus\", \"maxakalisaurus\", \"megalosaurus\", \"melanorosaurus\",", "\"spartan\", \"stigma\", \"stoic\", \"suave\", \"sycophant\", \"tirade\", \"tryst\", \"untenable\", \"vicarious\", \"vile\",", "\"herrerasaurus\", \"hesperosaurus\", \"heterodontosaurus\", \"homalocephale\", \"huayangosaurus\", \"hylaeosaurus\", \"hypacrosaurus\", \"hypselosaurus\", \"hypsilophodon\", \"iguanodon\",", "\"nigersaurus\", \"nipponosaurus\", \"noasaurus\", \"nodosaurus\", \"nomingia\", \"nothronychus\", \"nqwebasaurus\", \"omeisaurus\", \"ornitholestes\", \"ornithomimus\",", "\"ornithomimus\", \"orodromeus\", \"oryctodromeus\", \"othnielia\", \"ouranosaurus\", \"oviraptor\", \"rebbachisaurus\", \"rhabdodon\", \"rhoetosaurus\", \"rinchenia\",", "\"eolambia\", \"eoraptor\", 
\"eotyrannus\", \"equijubus\", \"erketu\", \"erlikosaurus\", \"euhelopus\", \"euoplocephalus\", \"europasaurus\", \"euskelosaurus\",", "\"CentOS\", \"Debian\", \"Fedora\", \"FreeBSD\", \"Gentoo\", \"Kali\", \"Lubuntu\", \"Manjaro\", \"Mint\", \"OS", "BSD 2-Clause License\", \"GNU General Public License (GPL)\", \"General Public", "\"paradox\", \"peevish\", \"propriety\", \"revel\", \"rhetoric\", \"spartan\", \"stigma\", \"stoic\", \"suave\", \"sycophant\",", "\"angst\", \"anomaly\", \"antidote\", \"baroque\", \"bona_fide\", \"bourgeois\", \"bravado\", \"brogue\", \"brusque\", \"cacophony\",", "\"hypselosaurus\", \"hypsilophodon\", \"iguanodon\", \"indosuchus\", \"ingenia\", \"irritator\", \"isisaurus\", \"janenschia\", \"jaxartosaurus\", \"jingshanosaurus\",", "\"gasosaurus\", \"gasparinisaura\", \"gastonia\", \"giganotosaurus\", \"gilmoreosaurus\", \"giraffatitan\", \"gobisaurus\", \"gorgosaurus\", \"goyocephale\", \"graciliceratops\",", "\"D\", \"Dart\", \"Delphi\", \"Dylan\", \"ECMAScript\", \"Elixir\", \"Emacs Lisp\", \"Erlang\", \"F#\",", "\"microraptor\", \"minmi\", \"monolophosaurus\", \"mononykus\", \"mussaurus\", \"muttaburrasaurus\", \"nanotyrannus\", \"nanshiungosaurus\", \"nemegtosaurus\", \"neovenator\",", "\"GNU Octave\", \"Go\", \"Groovy\", \"Haskell\", \"haXe\", \"Io\", \"J#\", \"Java\", \"JavaScript\",", "\"Python\", \"R\", \"Racket\", \"Ruby\", \"Rust\", \"Scala\", \"Scheme\", \"Smalltalk\", \"Tcl\", \"Tex\",", "\"FreeBSD\", \"Gentoo\", \"Kali\", \"Lubuntu\", \"Manjaro\", \"Mint\", \"OS X\", \"macOS\", \"OpenBSD\",", "\"dacentrurus\", \"daspletosaurus\", \"datousaurus\", \"deinocheirus\", \"deinonychus\", \"deltadromeus\", \"diceratops\", \"dicraeosaurus\", \"dilophosaurus\", \"diplodocus\",", "\"Racket\", \"Ruby\", \"Rust\", \"Scala\", \"Scheme\", \"Smalltalk\", \"Tcl\", \"Tex\", \"Transact-SQL\", \"TypeScript\",", "= [ \"ASP\", \"Assembly\", \"AutoIt\", \"Awk\", \"Bash\", \"C\", \"C Shell\",", "\"shamosaurus\", \"shanag\", \"shantungosaurus\", 
\"shunosaurus\", \"shuvuuia\", \"silvisaurus\", \"sinocalliopteryx\", \"sinornithosaurus\", \"sinosauropteryx\", \"sinraptor\",", "\"liaoceratops\", \"liaoxiornis\", \"ligabuesaurus\", \"liliensternus\", \"lophorhothon\", \"lophostropheus\", \"lufengosaurus\", \"lurdusaurus\", \"lycorhinus\", \"magyarosaurus\",", "\"compsognathus\", \"conchoraptor\", \"confuciusornis\", \"corythosaurus\", \"cryolophosaurus\", \"dacentrurus\", \"daspletosaurus\", \"datousaurus\", \"deinocheirus\", \"deinonychus\",", "\"Mozilla Public License 2.0 (MPL-2.0)\", \"Common Development and Distribution License", "\"bourgeois\", \"bravado\", \"brogue\", \"brusque\", \"cacophony\", \"caustic\", \"charisma\", \"cloying\", \"deja-vu\", \"dichotomy\",", "\"euskelosaurus\", \"eustreptospondylus\", \"fukuiraptor\", \"fukuisaurus\", \"gallimimus\", \"gargoyleosaurus\", \"garudimimus\", \"gasosaurus\", \"gasparinisaura\", \"gastonia\",", "\"gorgosaurus\", \"goyocephale\", \"graciliceratops\", \"gryposaurus\", \"guaibasaurus\", \"guanlong\", \"hadrosaurus\", \"hagryphus\", \"haplocanthosaurus\", \"harpymimus\",", "\"Windows 7\", \"Windows 8\", \"Windows 8.1\", \"Zorin\", \"elementaryOS\", \"macOS\", \"openSUSE\",", "Public License (GPL)\", \"General Public License (LGPL)\", \"MIT License (MIT)\",", "\"ouranosaurus\", \"oviraptor\", \"rebbachisaurus\", \"rhabdodon\", \"rhoetosaurus\", \"rinchenia\", \"riojasaurus\", \"rugops\", \"saichania\", \"saltasaurus\",", "2.0 (Apache-2.0)\", \"The BSD 3-Clause License\", \"The BSD 2-Clause License\",", "\"harbinger\", \"hedonist\", \"heresy\", \"idyllic\", \"insidious\", \"junket\", \"kitsch\", \"litany\", \"lurid\", \"malaise\",", "\"mamenchisaurus\", \"mapusaurus\", \"marshosaurus\", \"masiakasaurus\", \"massospondylus\", \"maxakalisaurus\", \"megalosaurus\", \"melanorosaurus\", \"metriacanthosaurus\", \"microceratops\",", "\"seismosaurus\", \"shamosaurus\", \"shanag\", \"shantungosaurus\", \"shunosaurus\", \"shuvuuia\", \"silvisaurus\", 
\"sinocalliopteryx\", \"sinornithosaurus\", \"sinosauropteryx\",", "\"silvisaurus\", \"sinocalliopteryx\", \"sinornithosaurus\", \"sinosauropteryx\", \"sinraptor\", \"sinvenator\", \"zalmoxes\", \"zephyrosaurus\", \"zuniceratops\", \"byzantine\",", "\"Dylan\", \"ECMAScript\", \"Elixir\", \"Emacs Lisp\", \"Erlang\", \"F#\", \"Falcon\", \"Fortran\", \"GNU", "\"F#\", \"Falcon\", \"Fortran\", \"GNU Octave\", \"Go\", \"Groovy\", \"Haskell\", \"haXe\", \"Io\",", "\"sinocalliopteryx\", \"sinornithosaurus\", \"sinosauropteryx\", \"sinraptor\", \"sinvenator\", \"zalmoxes\", \"zephyrosaurus\", \"zuniceratops\", \"byzantine\", \"svengali\",", "\"Development\", \"Downloads\", \"Documents\", \"Music\", \"Video\", \"Work\", \"Pictures\", \"Desktop\", \"Study\", ]", "\"dravidosaurus\", \"dromaeosaurus\", \"dromiceiomimus\", \"dryosaurus\", \"dryptosaurus\", \"dubreuillosaurus\", \"edmontonia\", \"edmontosaurus\", \"einiosaurus\", \"elaphrosaurus\",", "\"Downloads\", \"Documents\", \"Music\", \"Video\", \"Work\", \"Pictures\", \"Desktop\", \"Study\", ] PROJECT_NAMES", "\"OpenBSD\", \"PCLinuxOS\", \"Slackware\", \"Ubuntu\", \"Windows 10\", \"Windows 7\", \"Windows 8\",", "License, 2.0 (Apache-2.0)\", \"The BSD 3-Clause License\", \"The BSD 2-Clause", "\"ingenia\", \"irritator\", \"isisaurus\", \"janenschia\", \"jaxartosaurus\", \"jingshanosaurus\", \"jinzhousaurus\", \"jobaria\", \"juravenator\", \"kentrosaurus\",", "\"kitsch\", \"litany\", \"lurid\", \"malaise\", \"malinger\", \"mantra\", \"maudlin\", \"mercenary\", \"misnomer\", \"nirvana\",", "\"litany\", \"lurid\", \"malaise\", \"malinger\", \"mantra\", \"maudlin\", \"mercenary\", \"misnomer\", \"nirvana\", \"oblivion\",", "\"kentrosaurus\", \"khaan\", \"kotasaurus\", \"kritosaurus\", \"lamaceratops\", \"lambeosaurus\", \"lapparentosaurus\", \"leaellynasaura\", \"leptoceratops\", \"lesothosaurus\",", "\"mapusaurus\", \"marshosaurus\", \"masiakasaurus\", \"massospondylus\", \"maxakalisaurus\", \"megalosaurus\", 
\"melanorosaurus\", \"metriacanthosaurus\", \"microceratops\", \"micropachycephalosaurus\",", "\"barapasaurus\", \"barosaurus\", \"baryonyx\", \"becklespinax\", \"beipiaosaurus\", \"bellusaurus\", \"borogovia\", \"brachiosaurus\", \"brachyceratops\", \"bugenasaura\",", "(CDDL-1.0)\", \"Eclipse Public License (EPL-1.0)\", ] PROGRAMMING_LANGS = [ \"ASP\",", "\"avalonia\", \"avimimus\", \"azendohsaurus\", \"bactrosaurus\", \"bagaceratops\", \"bambiraptor\", \"barapasaurus\", \"barosaurus\", \"baryonyx\", \"becklespinax\",", "OS = [ \"Arch\", \"CentOS\", \"Debian\", \"Fedora\", \"FreeBSD\", \"Gentoo\", \"Kali\",", "\"shunosaurus\", \"shuvuuia\", \"silvisaurus\", \"sinocalliopteryx\", \"sinornithosaurus\", \"sinosauropteryx\", \"sinraptor\", \"sinvenator\", \"zalmoxes\", \"zephyrosaurus\",", "\"gobisaurus\", \"gorgosaurus\", \"goyocephale\", \"graciliceratops\", \"gryposaurus\", \"guaibasaurus\", \"guanlong\", \"hadrosaurus\", \"hagryphus\", \"haplocanthosaurus\",", "\"nemegtosaurus\", \"neovenator\", \"neuquenosaurus\", \"nigersaurus\", \"nipponosaurus\", \"noasaurus\", \"nodosaurus\", \"nomingia\", \"nothronychus\", \"nqwebasaurus\",", "\"jingshanosaurus\", \"jinzhousaurus\", \"jobaria\", \"juravenator\", \"kentrosaurus\", \"khaan\", \"kotasaurus\", \"kritosaurus\", \"lamaceratops\", \"lambeosaurus\",", "\"alvarezsaurus\", \"amargasaurus\", \"ammosaurus\", \"ampelosaurus\", \"amygdalodon\", \"anatotitan\", \"anchiceratops\", \"anchisaurus\", \"ankylosaurus\", \"anserimimus\",", "\"Kali\", \"Lubuntu\", \"Manjaro\", \"Mint\", \"OS X\", \"macOS\", \"OpenBSD\", \"PCLinuxOS\", \"Slackware\",", "\"cloying\", \"deja-vu\", \"dichotomy\", \"elan\", \"ennui\", \"epitome\", \"esoteric\", \"euphemism\", \"faux pas\",", "License\", \"GNU General Public License (GPL)\", \"General Public License (LGPL)\",", "\"minmi\", \"monolophosaurus\", \"mononykus\", \"mussaurus\", \"muttaburrasaurus\", \"nanotyrannus\", \"nanshiungosaurus\", \"nemegtosaurus\", \"neovenator\", 
\"neuquenosaurus\",", "\"carnotaurus\", \"caudipteryx\", \"cedarpelta\", \"centrosaurus\", \"ceratosaurus\", \"cetiosauriscus\", \"cetiosaurus\", \"chaoyangsaurus\", \"chasmosaurus\", \"chialingosaurus\",", "\"J#\", \"Java\", \"JavaScript\", \"Julia\", \"Kotlin\", \"Lisp\", \"Lua\", \"Mathematica\", \"Objective-C\", \"OCaml\",", "\"dicraeosaurus\", \"dilophosaurus\", \"diplodocus\", \"dracorex\", \"dravidosaurus\", \"dromaeosaurus\", \"dromiceiomimus\", \"dryosaurus\", \"dryptosaurus\", \"dubreuillosaurus\",", "\"sinraptor\", \"sinvenator\", \"zalmoxes\", \"zephyrosaurus\", \"zuniceratops\", \"byzantine\", \"svengali\", \"accolade\", \"acrimony\", \"angst\",", "\"saltasaurus\", \"saltopus\", \"sarcosaurus\", \"saurolophus\", \"sauropelta\", \"saurophaganax\", \"saurornithoides\", \"scelidosaurus\", \"scutellosaurus\", \"secernosaurus\",", "\"equijubus\", \"erketu\", \"erlikosaurus\", \"euhelopus\", \"euoplocephalus\", \"europasaurus\", \"euskelosaurus\", \"eustreptospondylus\", \"fukuiraptor\", \"fukuisaurus\",", "\"isisaurus\", \"janenschia\", \"jaxartosaurus\", \"jingshanosaurus\", \"jinzhousaurus\", \"jobaria\", \"juravenator\", \"kentrosaurus\", \"khaan\", \"kotasaurus\",", "\"ennui\", \"epitome\", \"esoteric\", \"euphemism\", \"faux pas\", \"fiasco\", \"finagle\", \"glib\", \"harbinger\",", "License (LGPL)\", \"MIT License (MIT)\", \"Mozilla Public License 2.0 (MPL-2.0)\",", "\"garudimimus\", \"gasosaurus\", \"gasparinisaura\", \"gastonia\", \"giganotosaurus\", \"gilmoreosaurus\", \"giraffatitan\", \"gobisaurus\", \"gorgosaurus\", \"goyocephale\",", "\"chirostenotes\", \"chubutisaurus\", \"chungkingosaurus\", \"citipati\", \"coelophysis\", \"coelurus\", \"coloradisaurus\", \"compsognathus\", \"conchoraptor\", \"confuciusornis\",", "\"camptosaurus\", \"carnotaurus\", \"caudipteryx\", \"cedarpelta\", \"centrosaurus\", \"ceratosaurus\", \"cetiosauriscus\", \"cetiosaurus\", \"chaoyangsaurus\", \"chasmosaurus\",", "\"ampelosaurus\", \"amygdalodon\", \"anatotitan\", 
\"anchiceratops\", \"anchisaurus\", \"ankylosaurus\", \"anserimimus\", \"antarctopelta\", \"antarctosaurus\", \"apatosaurus\",", "\"marshosaurus\", \"masiakasaurus\", \"massospondylus\", \"maxakalisaurus\", \"megalosaurus\", \"melanorosaurus\", \"metriacanthosaurus\", \"microceratops\", \"micropachycephalosaurus\", \"microraptor\",", "\"Kotlin\", \"Lisp\", \"Lua\", \"Mathematica\", \"Objective-C\", \"OCaml\", \"Perl\", \"PHP\", \"PL-I\", \"PL-SQL\",", "\"dichotomy\", \"elan\", \"ennui\", \"epitome\", \"esoteric\", \"euphemism\", \"faux pas\", \"fiasco\", \"finagle\",", "\"argentinosaurus\", \"arrhinoceratops\", \"atlascopcosaurus\", \"aucasaurus\", \"austrosaurus\", \"avaceratops\", \"avalonia\", \"avimimus\", \"azendohsaurus\", \"bactrosaurus\",", "\"lambeosaurus\", \"lapparentosaurus\", \"leaellynasaura\", \"leptoceratops\", \"lesothosaurus\", \"lexovisaurus\", \"liaoceratops\", \"liaoxiornis\", \"ligabuesaurus\", \"liliensternus\",", "8\", \"Windows 8.1\", \"Zorin\", \"elementaryOS\", \"macOS\", \"openSUSE\", ] FOLDERS =", "\"fukuisaurus\", \"gallimimus\", \"gargoyleosaurus\", \"garudimimus\", \"gasosaurus\", \"gasparinisaura\", \"gastonia\", \"giganotosaurus\", \"gilmoreosaurus\", \"giraffatitan\",", "\"goyocephale\", \"graciliceratops\", \"gryposaurus\", \"guaibasaurus\", \"guanlong\", \"hadrosaurus\", \"hagryphus\", \"haplocanthosaurus\", \"harpymimus\", \"herrerasaurus\",", "\"Study\", ] PROJECT_NAMES = [ \"aardonyx\", \"abelisaurus\", \"achelousaurus\", \"achillobator\", \"acrocanthosaurus\",", "\"Common Development and Distribution License (CDDL-1.0)\", \"Eclipse Public License (EPL-1.0)\",", "\"einiosaurus\", \"elaphrosaurus\", \"emausaurus\", \"eolambia\", \"eoraptor\", \"eotyrannus\", \"equijubus\", \"erketu\", \"erlikosaurus\", \"euhelopus\",", "the development.\"\"\" LICENSES = [ \"Apache License, 2.0 (Apache-2.0)\", \"The", "\"aucasaurus\", \"austrosaurus\", \"avaceratops\", \"avalonia\", \"avimimus\", \"azendohsaurus\", \"bactrosaurus\", 
\"bagaceratops\", \"bambiraptor\", \"barapasaurus\",", "\"bona_fide\", \"bourgeois\", \"bravado\", \"brogue\", \"brusque\", \"cacophony\", \"caustic\", \"charisma\", \"cloying\", \"deja-vu\",", "\"Lisp\", \"Lua\", \"Mathematica\", \"Objective-C\", \"OCaml\", \"Perl\", \"PHP\", \"PL-I\", \"PL-SQL\", \"PowerShell\",", "] OS = [ \"Arch\", \"CentOS\", \"Debian\", \"Fedora\", \"FreeBSD\", \"Gentoo\",", "2-Clause License\", \"GNU General Public License (GPL)\", \"General Public License", "\"Fedora\", \"FreeBSD\", \"Gentoo\", \"Kali\", \"Lubuntu\", \"Manjaro\", \"Mint\", \"OS X\", \"macOS\",", "\"Arch\", \"CentOS\", \"Debian\", \"Fedora\", \"FreeBSD\", \"Gentoo\", \"Kali\", \"Lubuntu\", \"Manjaro\", \"Mint\",", "\"svengali\", \"accolade\", \"acrimony\", \"angst\", \"anomaly\", \"antidote\", \"baroque\", \"bona_fide\", \"bourgeois\", \"bravado\",", "\"citipati\", \"coelophysis\", \"coelurus\", \"coloradisaurus\", \"compsognathus\", \"conchoraptor\", \"confuciusornis\", \"corythosaurus\", \"cryolophosaurus\", \"dacentrurus\",", "\"lamaceratops\", \"lambeosaurus\", \"lapparentosaurus\", \"leaellynasaura\", \"leptoceratops\", \"lesothosaurus\", \"lexovisaurus\", \"liaoceratops\", \"liaoxiornis\", \"ligabuesaurus\",", "\"lophostropheus\", \"lufengosaurus\", \"lurdusaurus\", \"lycorhinus\", \"magyarosaurus\", \"maiasaura\", \"majungatholus\", \"malawisaurus\", \"mamenchisaurus\", \"mapusaurus\",", "\"elan\", \"ennui\", \"epitome\", \"esoteric\", \"euphemism\", \"faux pas\", \"fiasco\", \"finagle\", \"glib\",", "\"Assembly\", \"AutoIt\", \"Awk\", \"Bash\", \"C\", \"C Shell\", \"C#\", \"C++\", \"Caml\",", "\"anchiceratops\", \"anchisaurus\", \"ankylosaurus\", \"anserimimus\", \"antarctopelta\", \"antarctosaurus\", \"apatosaurus\", \"aragosaurus\", \"aralosaurus\", \"archaeoceratops\",", "\"insidious\", \"junket\", \"kitsch\", \"litany\", \"lurid\", \"malaise\", \"malinger\", \"mantra\", \"maudlin\", \"mercenary\",", "\"Manjaro\", \"Mint\", \"OS X\", \"macOS\", \"OpenBSD\", 
\"PCLinuxOS\", \"Slackware\", \"Ubuntu\", \"Windows", "\"giraffatitan\", \"gobisaurus\", \"gorgosaurus\", \"goyocephale\", \"graciliceratops\", \"gryposaurus\", \"guaibasaurus\", \"guanlong\", \"hadrosaurus\", \"hagryphus\",", "\"openSUSE\", ] FOLDERS = [ \"Development\", \"Downloads\", \"Documents\", \"Music\", \"Video\",", "\"bactrosaurus\", \"bagaceratops\", \"bambiraptor\", \"barapasaurus\", \"barosaurus\", \"baryonyx\", \"becklespinax\", \"beipiaosaurus\", \"bellusaurus\", \"borogovia\",", "\"sauropelta\", \"saurophaganax\", \"saurornithoides\", \"scelidosaurus\", \"scutellosaurus\", \"secernosaurus\", \"segisaurus\", \"segnosaurus\", \"seismosaurus\", \"shamosaurus\",", "\"PCLinuxOS\", \"Slackware\", \"Ubuntu\", \"Windows 10\", \"Windows 7\", \"Windows 8\", \"Windows", "\"Emacs Lisp\", \"Erlang\", \"F#\", \"Falcon\", \"Fortran\", \"GNU Octave\", \"Go\", \"Groovy\",", "\"dracorex\", \"dravidosaurus\", \"dromaeosaurus\", \"dromiceiomimus\", \"dryosaurus\", \"dryptosaurus\", \"dubreuillosaurus\", \"edmontonia\", \"edmontosaurus\", \"einiosaurus\",", "\"monolophosaurus\", \"mononykus\", \"mussaurus\", \"muttaburrasaurus\", \"nanotyrannus\", \"nanshiungosaurus\", \"nemegtosaurus\", \"neovenator\", \"neuquenosaurus\", \"nigersaurus\",", "\"GNU General Public License (GPL)\", \"General Public License (LGPL)\", \"MIT", "\"noasaurus\", \"nodosaurus\", \"nomingia\", \"nothronychus\", \"nqwebasaurus\", \"omeisaurus\", \"ornitholestes\", \"ornithomimus\", \"orodromeus\", \"oryctodromeus\",", "\"malaise\", \"malinger\", \"mantra\", \"maudlin\", \"mercenary\", \"misnomer\", \"nirvana\", \"oblivion\", \"ogle\", \"ostracize\",", "\"alectrosaurus\", \"alioramus\", \"allosaurus\", \"alvarezsaurus\", \"amargasaurus\", \"ammosaurus\", \"ampelosaurus\", \"amygdalodon\", \"anatotitan\", \"anchiceratops\",", "\"mantra\", \"maudlin\", \"mercenary\", \"misnomer\", \"nirvana\", \"oblivion\", \"ogle\", \"ostracize\", \"panacea\", \"paradox\",", "\"lycorhinus\", \"magyarosaurus\", 
\"maiasaura\", \"majungatholus\", \"malawisaurus\", \"mamenchisaurus\", \"mapusaurus\", \"marshosaurus\", \"masiakasaurus\", \"massospondylus\",", "\"scutellosaurus\", \"secernosaurus\", \"segisaurus\", \"segnosaurus\", \"seismosaurus\", \"shamosaurus\", \"shanag\", \"shantungosaurus\", \"shunosaurus\", \"shuvuuia\",", "\"revel\", \"rhetoric\", \"spartan\", \"stigma\", \"stoic\", \"suave\", \"sycophant\", \"tirade\", \"tryst\", \"untenable\",", "\"PowerShell\", \"Prolog\", \"Python\", \"R\", \"Racket\", \"Ruby\", \"Rust\", \"Scala\", \"Scheme\", \"Smalltalk\",", "all the data related to the development.\"\"\" LICENSES = [", "\"lurid\", \"malaise\", \"malinger\", \"mantra\", \"maudlin\", \"mercenary\", \"misnomer\", \"nirvana\", \"oblivion\", \"ogle\",", "[ \"ASP\", \"Assembly\", \"AutoIt\", \"Awk\", \"Bash\", \"C\", \"C Shell\", \"C#\",", "\"melanorosaurus\", \"metriacanthosaurus\", \"microceratops\", \"micropachycephalosaurus\", \"microraptor\", \"minmi\", \"monolophosaurus\", \"mononykus\", \"mussaurus\", \"muttaburrasaurus\",", "Lisp\", \"D\", \"Dart\", \"Delphi\", \"Dylan\", \"ECMAScript\", \"Elixir\", \"Emacs Lisp\", \"Erlang\",", "\"epitome\", \"esoteric\", \"euphemism\", \"faux pas\", \"fiasco\", \"finagle\", \"glib\", \"harbinger\", \"hedonist\",", "\"\"\"Provides all the data related to the development.\"\"\" LICENSES =", "\"afrovenator\", \"agilisaurus\", \"alamosaurus\", \"albertaceratops\", \"albertosaurus\", \"alectrosaurus\", \"alioramus\", \"allosaurus\", \"alvarezsaurus\", \"amargasaurus\",", "FOLDERS = [ \"Development\", \"Downloads\", \"Documents\", \"Music\", \"Video\", \"Work\", \"Pictures\",", "LICENSES = [ \"Apache License, 2.0 (Apache-2.0)\", \"The BSD 3-Clause", "\"brachyceratops\", \"bugenasaura\", \"buitreraptor\", \"camarasaurus\", \"camptosaurus\", \"carnotaurus\", \"caudipteryx\", \"cedarpelta\", \"centrosaurus\", \"ceratosaurus\",", "\"ECMAScript\", \"Elixir\", \"Emacs Lisp\", \"Erlang\", \"F#\", \"Falcon\", \"Fortran\", \"GNU 
Octave\",", "\"segisaurus\", \"segnosaurus\", \"seismosaurus\", \"shamosaurus\", \"shanag\", \"shantungosaurus\", \"shunosaurus\", \"shuvuuia\", \"silvisaurus\", \"sinocalliopteryx\",", "\"abelisaurus\", \"achelousaurus\", \"achillobator\", \"acrocanthosaurus\", \"aegyptosaurus\", \"afrovenator\", \"agilisaurus\", \"alamosaurus\", \"albertaceratops\", \"albertosaurus\",", "\"eotyrannus\", \"equijubus\", \"erketu\", \"erlikosaurus\", \"euhelopus\", \"euoplocephalus\", \"europasaurus\", \"euskelosaurus\", \"eustreptospondylus\", \"fukuiraptor\",", "\"Objective-C\", \"OCaml\", \"Perl\", \"PHP\", \"PL-I\", \"PL-SQL\", \"PowerShell\", \"Prolog\", \"Python\", \"R\",", "\"gallimimus\", \"gargoyleosaurus\", \"garudimimus\", \"gasosaurus\", \"gasparinisaura\", \"gastonia\", \"giganotosaurus\", \"gilmoreosaurus\", \"giraffatitan\", \"gobisaurus\",", "\"kritosaurus\", \"lamaceratops\", \"lambeosaurus\", \"lapparentosaurus\", \"leaellynasaura\", \"leptoceratops\", \"lesothosaurus\", \"lexovisaurus\", \"liaoceratops\", \"liaoxiornis\",", "\"amygdalodon\", \"anatotitan\", \"anchiceratops\", \"anchisaurus\", \"ankylosaurus\", \"anserimimus\", \"antarctopelta\", \"antarctosaurus\", \"apatosaurus\", \"aragosaurus\",", "\"juravenator\", \"kentrosaurus\", \"khaan\", \"kotasaurus\", \"kritosaurus\", \"lamaceratops\", \"lambeosaurus\", \"lapparentosaurus\", \"leaellynasaura\", \"leptoceratops\",", "\"aardonyx\", \"abelisaurus\", \"achelousaurus\", \"achillobator\", \"acrocanthosaurus\", \"aegyptosaurus\", \"afrovenator\", \"agilisaurus\", \"alamosaurus\", \"albertaceratops\",", "= [ \"Development\", \"Downloads\", \"Documents\", \"Music\", \"Video\", \"Work\", \"Pictures\", \"Desktop\",", "\"maiasaura\", \"majungatholus\", \"malawisaurus\", \"mamenchisaurus\", \"mapusaurus\", \"marshosaurus\", \"masiakasaurus\", \"massospondylus\", \"maxakalisaurus\", \"megalosaurus\",", "\"alamosaurus\", \"albertaceratops\", \"albertosaurus\", \"alectrosaurus\", \"alioramus\", \"allosaurus\", 
\"alvarezsaurus\", \"amargasaurus\", \"ammosaurus\", \"ampelosaurus\",", "7\", \"Windows 8\", \"Windows 8.1\", \"Zorin\", \"elementaryOS\", \"macOS\", \"openSUSE\", ]", "\"haplocanthosaurus\", \"harpymimus\", \"herrerasaurus\", \"hesperosaurus\", \"heterodontosaurus\", \"homalocephale\", \"huayangosaurus\", \"hylaeosaurus\", \"hypacrosaurus\", \"hypselosaurus\",", "\"mononykus\", \"mussaurus\", \"muttaburrasaurus\", \"nanotyrannus\", \"nanshiungosaurus\", \"nemegtosaurus\", \"neovenator\", \"neuquenosaurus\", \"nigersaurus\", \"nipponosaurus\",", "8.1\", \"Zorin\", \"elementaryOS\", \"macOS\", \"openSUSE\", ] FOLDERS = [ \"Development\",", "\"centrosaurus\", \"ceratosaurus\", \"cetiosauriscus\", \"cetiosaurus\", \"chaoyangsaurus\", \"chasmosaurus\", \"chialingosaurus\", \"chindesaurus\", \"chinshakiangosaurus\", \"chirostenotes\",", "\"suave\", \"sycophant\", \"tirade\", \"tryst\", \"untenable\", \"vicarious\", \"vile\", \"waft\", \"zealous\", ]", "\"gilmoreosaurus\", \"giraffatitan\", \"gobisaurus\", \"gorgosaurus\", \"goyocephale\", \"graciliceratops\", \"gryposaurus\", \"guaibasaurus\", \"guanlong\", \"hadrosaurus\",", "= [ \"aardonyx\", \"abelisaurus\", \"achelousaurus\", \"achillobator\", \"acrocanthosaurus\", \"aegyptosaurus\", \"afrovenator\", \"agilisaurus\",", "\"microceratops\", \"micropachycephalosaurus\", \"microraptor\", \"minmi\", \"monolophosaurus\", \"mononykus\", \"mussaurus\", \"muttaburrasaurus\", \"nanotyrannus\", \"nanshiungosaurus\",", "\"Rust\", \"Scala\", \"Scheme\", \"Smalltalk\", \"Tcl\", \"Tex\", \"Transact-SQL\", \"TypeScript\", \"Z shell\",", "\"eoraptor\", \"eotyrannus\", \"equijubus\", \"erketu\", \"erlikosaurus\", \"euhelopus\", \"euoplocephalus\", \"europasaurus\", \"euskelosaurus\", \"eustreptospondylus\",", "\"coloradisaurus\", \"compsognathus\", \"conchoraptor\", \"confuciusornis\", \"corythosaurus\", \"cryolophosaurus\", \"dacentrurus\", \"daspletosaurus\", \"datousaurus\", \"deinocheirus\",", "\"nirvana\", \"oblivion\", 
\"ogle\", \"ostracize\", \"panacea\", \"paradox\", \"peevish\", \"propriety\", \"revel\", \"rhetoric\",", "data related to the development.\"\"\" LICENSES = [ \"Apache License,", "\"dromiceiomimus\", \"dryosaurus\", \"dryptosaurus\", \"dubreuillosaurus\", \"edmontonia\", \"edmontosaurus\", \"einiosaurus\", \"elaphrosaurus\", \"emausaurus\", \"eolambia\",", "\"ASP\", \"Assembly\", \"AutoIt\", \"Awk\", \"Bash\", \"C\", \"C Shell\", \"C#\", \"C++\",", "X\", \"macOS\", \"OpenBSD\", \"PCLinuxOS\", \"Slackware\", \"Ubuntu\", \"Windows 10\", \"Windows 7\",", "License (MIT)\", \"Mozilla Public License 2.0 (MPL-2.0)\", \"Common Development and", "\"Go\", \"Groovy\", \"Haskell\", \"haXe\", \"Io\", \"J#\", \"Java\", \"JavaScript\", \"Julia\", \"Kotlin\",", "] PROJECT_NAMES = [ \"aardonyx\", \"abelisaurus\", \"achelousaurus\", \"achillobator\", \"acrocanthosaurus\", \"aegyptosaurus\",", "Octave\", \"Go\", \"Groovy\", \"Haskell\", \"haXe\", \"Io\", \"J#\", \"Java\", \"JavaScript\", \"Julia\",", "\"guaibasaurus\", \"guanlong\", \"hadrosaurus\", \"hagryphus\", \"haplocanthosaurus\", \"harpymimus\", \"herrerasaurus\", \"hesperosaurus\", \"heterodontosaurus\", \"homalocephale\",", "\"saurophaganax\", \"saurornithoides\", \"scelidosaurus\", \"scutellosaurus\", \"secernosaurus\", \"segisaurus\", \"segnosaurus\", \"seismosaurus\", \"shamosaurus\", \"shanag\",", "License (GPL)\", \"General Public License (LGPL)\", \"MIT License (MIT)\", \"Mozilla", "\"hylaeosaurus\", \"hypacrosaurus\", \"hypselosaurus\", \"hypsilophodon\", \"iguanodon\", \"indosuchus\", \"ingenia\", \"irritator\", \"isisaurus\", \"janenschia\",", "\"sinornithosaurus\", \"sinosauropteryx\", \"sinraptor\", \"sinvenator\", \"zalmoxes\", \"zephyrosaurus\", \"zuniceratops\", \"byzantine\", \"svengali\", \"accolade\",", "\"Groovy\", \"Haskell\", \"haXe\", \"Io\", \"J#\", \"Java\", \"JavaScript\", \"Julia\", \"Kotlin\", \"Lisp\",", "\"heresy\", \"idyllic\", \"insidious\", \"junket\", \"kitsch\", \"litany\", \"lurid\", 
\"malaise\", \"malinger\", \"mantra\",", "\"Elixir\", \"Emacs Lisp\", \"Erlang\", \"F#\", \"Falcon\", \"Fortran\", \"GNU Octave\", \"Go\",", "\"Video\", \"Work\", \"Pictures\", \"Desktop\", \"Study\", ] PROJECT_NAMES = [ \"aardonyx\",", "\"avimimus\", \"azendohsaurus\", \"bactrosaurus\", \"bagaceratops\", \"bambiraptor\", \"barapasaurus\", \"barosaurus\", \"baryonyx\", \"becklespinax\", \"beipiaosaurus\",", "\"corythosaurus\", \"cryolophosaurus\", \"dacentrurus\", \"daspletosaurus\", \"datousaurus\", \"deinocheirus\", \"deinonychus\", \"deltadromeus\", \"diceratops\", \"dicraeosaurus\",", "\"emausaurus\", \"eolambia\", \"eoraptor\", \"eotyrannus\", \"equijubus\", \"erketu\", \"erlikosaurus\", \"euhelopus\", \"euoplocephalus\", \"europasaurus\",", "\"cetiosauriscus\", \"cetiosaurus\", \"chaoyangsaurus\", \"chasmosaurus\", \"chialingosaurus\", \"chindesaurus\", \"chinshakiangosaurus\", \"chirostenotes\", \"chubutisaurus\", \"chungkingosaurus\",", "\"erketu\", \"erlikosaurus\", \"euhelopus\", \"euoplocephalus\", \"europasaurus\", \"euskelosaurus\", \"eustreptospondylus\", \"fukuiraptor\", \"fukuisaurus\", \"gallimimus\",", "\"edmontosaurus\", \"einiosaurus\", \"elaphrosaurus\", \"emausaurus\", \"eolambia\", \"eoraptor\", \"eotyrannus\", \"equijubus\", \"erketu\", \"erlikosaurus\",", "\"huayangosaurus\", \"hylaeosaurus\", \"hypacrosaurus\", \"hypselosaurus\", \"hypsilophodon\", \"iguanodon\", \"indosuchus\", \"ingenia\", \"irritator\", \"isisaurus\",", "\"ceratosaurus\", \"cetiosauriscus\", \"cetiosaurus\", \"chaoyangsaurus\", \"chasmosaurus\", \"chialingosaurus\", \"chindesaurus\", \"chinshakiangosaurus\", \"chirostenotes\", \"chubutisaurus\",", "\"Scala\", \"Scheme\", \"Smalltalk\", \"Tcl\", \"Tex\", \"Transact-SQL\", \"TypeScript\", \"Z shell\", ]", "\"leptoceratops\", \"lesothosaurus\", \"lexovisaurus\", \"liaoceratops\", \"liaoxiornis\", \"ligabuesaurus\", \"liliensternus\", \"lophorhothon\", \"lophostropheus\", \"lufengosaurus\",", "\"sinosauropteryx\", 
\"sinraptor\", \"sinvenator\", \"zalmoxes\", \"zephyrosaurus\", \"zuniceratops\", \"byzantine\", \"svengali\", \"accolade\", \"acrimony\",", "development.\"\"\" LICENSES = [ \"Apache License, 2.0 (Apache-2.0)\", \"The BSD", "\"euphemism\", \"faux pas\", \"fiasco\", \"finagle\", \"glib\", \"harbinger\", \"hedonist\", \"heresy\", \"idyllic\",", "\"gastonia\", \"giganotosaurus\", \"gilmoreosaurus\", \"giraffatitan\", \"gobisaurus\", \"gorgosaurus\", \"goyocephale\", \"graciliceratops\", \"gryposaurus\", \"guaibasaurus\",", "\"Erlang\", \"F#\", \"Falcon\", \"Fortran\", \"GNU Octave\", \"Go\", \"Groovy\", \"Haskell\", \"haXe\",", "\"bugenasaura\", \"buitreraptor\", \"camarasaurus\", \"camptosaurus\", \"carnotaurus\", \"caudipteryx\", \"cedarpelta\", \"centrosaurus\", \"ceratosaurus\", \"cetiosauriscus\",", "\"lophorhothon\", \"lophostropheus\", \"lufengosaurus\", \"lurdusaurus\", \"lycorhinus\", \"magyarosaurus\", \"maiasaura\", \"majungatholus\", \"malawisaurus\", \"mamenchisaurus\",", "\"maudlin\", \"mercenary\", \"misnomer\", \"nirvana\", \"oblivion\", \"ogle\", \"ostracize\", \"panacea\", \"paradox\", \"peevish\",", "\"hagryphus\", \"haplocanthosaurus\", \"harpymimus\", \"herrerasaurus\", \"hesperosaurus\", \"heterodontosaurus\", \"homalocephale\", \"huayangosaurus\", \"hylaeosaurus\", \"hypacrosaurus\",", "= [ \"Apache License, 2.0 (Apache-2.0)\", \"The BSD 3-Clause License\",", "\"coelophysis\", \"coelurus\", \"coloradisaurus\", \"compsognathus\", \"conchoraptor\", \"confuciusornis\", \"corythosaurus\", \"cryolophosaurus\", \"dacentrurus\", \"daspletosaurus\",", "\"edmontonia\", \"edmontosaurus\", \"einiosaurus\", \"elaphrosaurus\", \"emausaurus\", \"eolambia\", \"eoraptor\", \"eotyrannus\", \"equijubus\", \"erketu\",", "\"saltopus\", \"sarcosaurus\", \"saurolophus\", \"sauropelta\", \"saurophaganax\", \"saurornithoides\", \"scelidosaurus\", \"scutellosaurus\", \"secernosaurus\", \"segisaurus\",", "Lisp\", \"Erlang\", \"F#\", \"Falcon\", \"Fortran\", \"GNU 
Octave\", \"Go\", \"Groovy\", \"Haskell\",", "\"Z shell\", ] OS = [ \"Arch\", \"CentOS\", \"Debian\", \"Fedora\",", "\"Fortran\", \"GNU Octave\", \"Go\", \"Groovy\", \"Haskell\", \"haXe\", \"Io\", \"J#\", \"Java\",", "\"C++\", \"Caml\", \"Ceylon\", \"Clojure\", \"CoffeeScript\", \"Common Lisp\", \"D\", \"Dart\", \"Delphi\",", "\"Transact-SQL\", \"TypeScript\", \"Z shell\", ] OS = [ \"Arch\", \"CentOS\",", "\"JavaScript\", \"Julia\", \"Kotlin\", \"Lisp\", \"Lua\", \"Mathematica\", \"Objective-C\", \"OCaml\", \"Perl\", \"PHP\",", "] FOLDERS = [ \"Development\", \"Downloads\", \"Documents\", \"Music\", \"Video\", \"Work\",", "\"zephyrosaurus\", \"zuniceratops\", \"byzantine\", \"svengali\", \"accolade\", \"acrimony\", \"angst\", \"anomaly\", \"antidote\", \"baroque\",", "\"hesperosaurus\", \"heterodontosaurus\", \"homalocephale\", \"huayangosaurus\", \"hylaeosaurus\", \"hypacrosaurus\", \"hypselosaurus\", \"hypsilophodon\", \"iguanodon\", \"indosuchus\",", "\"atlascopcosaurus\", \"aucasaurus\", \"austrosaurus\", \"avaceratops\", \"avalonia\", \"avimimus\", \"azendohsaurus\", \"bactrosaurus\", \"bagaceratops\", \"bambiraptor\",", "\"propriety\", \"revel\", \"rhetoric\", \"spartan\", \"stigma\", \"stoic\", \"suave\", \"sycophant\", \"tirade\", \"tryst\",", "\"General Public License (LGPL)\", \"MIT License (MIT)\", \"Mozilla Public License", "\"archaeopteryx\", \"archaeornithomimus\", \"argentinosaurus\", \"arrhinoceratops\", \"atlascopcosaurus\", \"aucasaurus\", \"austrosaurus\", \"avaceratops\", \"avalonia\", \"avimimus\",", "\"orodromeus\", \"oryctodromeus\", \"othnielia\", \"ouranosaurus\", \"oviraptor\", \"rebbachisaurus\", \"rhabdodon\", \"rhoetosaurus\", \"rinchenia\", \"riojasaurus\",", "\"khaan\", \"kotasaurus\", \"kritosaurus\", \"lamaceratops\", \"lambeosaurus\", \"lapparentosaurus\", \"leaellynasaura\", \"leptoceratops\", \"lesothosaurus\", \"lexovisaurus\",", "Shell\", \"C#\", \"C++\", \"Caml\", \"Ceylon\", \"Clojure\", \"CoffeeScript\", \"Common Lisp\", 
\"D\",", "\"aragosaurus\", \"aralosaurus\", \"archaeoceratops\", \"archaeopteryx\", \"archaeornithomimus\", \"argentinosaurus\", \"arrhinoceratops\", \"atlascopcosaurus\", \"aucasaurus\", \"austrosaurus\",", "\"anatotitan\", \"anchiceratops\", \"anchisaurus\", \"ankylosaurus\", \"anserimimus\", \"antarctopelta\", \"antarctosaurus\", \"apatosaurus\", \"aragosaurus\", \"aralosaurus\",", "\"masiakasaurus\", \"massospondylus\", \"maxakalisaurus\", \"megalosaurus\", \"melanorosaurus\", \"metriacanthosaurus\", \"microceratops\", \"micropachycephalosaurus\", \"microraptor\", \"minmi\",", "\"Mint\", \"OS X\", \"macOS\", \"OpenBSD\", \"PCLinuxOS\", \"Slackware\", \"Ubuntu\", \"Windows 10\",", "\"chaoyangsaurus\", \"chasmosaurus\", \"chialingosaurus\", \"chindesaurus\", \"chinshakiangosaurus\", \"chirostenotes\", \"chubutisaurus\", \"chungkingosaurus\", \"citipati\", \"coelophysis\",", "\"esoteric\", \"euphemism\", \"faux pas\", \"fiasco\", \"finagle\", \"glib\", \"harbinger\", \"hedonist\", \"heresy\",", "\"chialingosaurus\", \"chindesaurus\", \"chinshakiangosaurus\", \"chirostenotes\", \"chubutisaurus\", \"chungkingosaurus\", \"citipati\", \"coelophysis\", \"coelurus\", \"coloradisaurus\",", "\"chungkingosaurus\", \"citipati\", \"coelophysis\", \"coelurus\", \"coloradisaurus\", \"compsognathus\", \"conchoraptor\", \"confuciusornis\", \"corythosaurus\", \"cryolophosaurus\",", "\"fiasco\", \"finagle\", \"glib\", \"harbinger\", \"hedonist\", \"heresy\", \"idyllic\", \"insidious\", \"junket\", \"kitsch\",", "\"saichania\", \"saltasaurus\", \"saltopus\", \"sarcosaurus\", \"saurolophus\", \"sauropelta\", \"saurophaganax\", \"saurornithoides\", \"scelidosaurus\", \"scutellosaurus\",", "\"cryolophosaurus\", \"dacentrurus\", \"daspletosaurus\", \"datousaurus\", \"deinocheirus\", \"deinonychus\", \"deltadromeus\", \"diceratops\", \"dicraeosaurus\", \"dilophosaurus\",", "\"confuciusornis\", \"corythosaurus\", \"cryolophosaurus\", \"dacentrurus\", \"daspletosaurus\", \"datousaurus\", 
\"deinocheirus\", \"deinonychus\", \"deltadromeus\", \"diceratops\",", "\"euhelopus\", \"euoplocephalus\", \"europasaurus\", \"euskelosaurus\", \"eustreptospondylus\", \"fukuiraptor\", \"fukuisaurus\", \"gallimimus\", \"gargoyleosaurus\", \"garudimimus\",", "\"ligabuesaurus\", \"liliensternus\", \"lophorhothon\", \"lophostropheus\", \"lufengosaurus\", \"lurdusaurus\", \"lycorhinus\", \"magyarosaurus\", \"maiasaura\", \"majungatholus\",", "(LGPL)\", \"MIT License (MIT)\", \"Mozilla Public License 2.0 (MPL-2.0)\", \"Common", "\"shanag\", \"shantungosaurus\", \"shunosaurus\", \"shuvuuia\", \"silvisaurus\", \"sinocalliopteryx\", \"sinornithosaurus\", \"sinosauropteryx\", \"sinraptor\", \"sinvenator\",", "\"beipiaosaurus\", \"bellusaurus\", \"borogovia\", \"brachiosaurus\", \"brachyceratops\", \"bugenasaura\", \"buitreraptor\", \"camarasaurus\", \"camptosaurus\", \"carnotaurus\",", "\"rinchenia\", \"riojasaurus\", \"rugops\", \"saichania\", \"saltasaurus\", \"saltopus\", \"sarcosaurus\", \"saurolophus\", \"sauropelta\", \"saurophaganax\",", "\"avaceratops\", \"avalonia\", \"avimimus\", \"azendohsaurus\", \"bactrosaurus\", \"bagaceratops\", \"bambiraptor\", \"barapasaurus\", \"barosaurus\", \"baryonyx\",", "\"hadrosaurus\", \"hagryphus\", \"haplocanthosaurus\", \"harpymimus\", \"herrerasaurus\", \"hesperosaurus\", \"heterodontosaurus\", \"homalocephale\", \"huayangosaurus\", \"hylaeosaurus\",", "\"deja-vu\", \"dichotomy\", \"elan\", \"ennui\", \"epitome\", \"esoteric\", \"euphemism\", \"faux pas\", \"fiasco\",", "\"albertaceratops\", \"albertosaurus\", \"alectrosaurus\", \"alioramus\", \"allosaurus\", \"alvarezsaurus\", \"amargasaurus\", \"ammosaurus\", \"ampelosaurus\", \"amygdalodon\",", "\"Tcl\", \"Tex\", \"Transact-SQL\", \"TypeScript\", \"Z shell\", ] OS = [", "\"nothronychus\", \"nqwebasaurus\", \"omeisaurus\", \"ornitholestes\", \"ornithomimus\", \"orodromeus\", \"oryctodromeus\", \"othnielia\", \"ouranosaurus\", \"oviraptor\",", "\"Java\", \"JavaScript\", 
\"Julia\", \"Kotlin\", \"Lisp\", \"Lua\", \"Mathematica\", \"Objective-C\", \"OCaml\", \"Perl\",", "\"R\", \"Racket\", \"Ruby\", \"Rust\", \"Scala\", \"Scheme\", \"Smalltalk\", \"Tcl\", \"Tex\", \"Transact-SQL\",", "\"heterodontosaurus\", \"homalocephale\", \"huayangosaurus\", \"hylaeosaurus\", \"hypacrosaurus\", \"hypselosaurus\", \"hypsilophodon\", \"iguanodon\", \"indosuchus\", \"ingenia\",", "\"Clojure\", \"CoffeeScript\", \"Common Lisp\", \"D\", \"Dart\", \"Delphi\", \"Dylan\", \"ECMAScript\", \"Elixir\",", "(MPL-2.0)\", \"Common Development and Distribution License (CDDL-1.0)\", \"Eclipse Public License", "License (EPL-1.0)\", ] PROGRAMMING_LANGS = [ \"ASP\", \"Assembly\", \"AutoIt\", \"Awk\",", "\"bellusaurus\", \"borogovia\", \"brachiosaurus\", \"brachyceratops\", \"bugenasaura\", \"buitreraptor\", \"camarasaurus\", \"camptosaurus\", \"carnotaurus\", \"caudipteryx\",", "\"camarasaurus\", \"camptosaurus\", \"carnotaurus\", \"caudipteryx\", \"cedarpelta\", \"centrosaurus\", \"ceratosaurus\", \"cetiosauriscus\", \"cetiosaurus\", \"chaoyangsaurus\",", "\"graciliceratops\", \"gryposaurus\", \"guaibasaurus\", \"guanlong\", \"hadrosaurus\", \"hagryphus\", \"haplocanthosaurus\", \"harpymimus\", \"herrerasaurus\", \"hesperosaurus\",", "\"nodosaurus\", \"nomingia\", \"nothronychus\", \"nqwebasaurus\", \"omeisaurus\", \"ornitholestes\", \"ornithomimus\", \"orodromeus\", \"oryctodromeus\", \"othnielia\",", "\"arrhinoceratops\", \"atlascopcosaurus\", \"aucasaurus\", \"austrosaurus\", \"avaceratops\", \"avalonia\", \"avimimus\", \"azendohsaurus\", \"bactrosaurus\", \"bagaceratops\",", "\"zuniceratops\", \"byzantine\", \"svengali\", \"accolade\", \"acrimony\", \"angst\", \"anomaly\", \"antidote\", \"baroque\", \"bona_fide\",", "\"coelurus\", \"coloradisaurus\", \"compsognathus\", \"conchoraptor\", \"confuciusornis\", \"corythosaurus\", \"cryolophosaurus\", \"dacentrurus\", \"daspletosaurus\", \"datousaurus\",", "\"shantungosaurus\", \"shunosaurus\", \"shuvuuia\", 
\"silvisaurus\", \"sinocalliopteryx\", \"sinornithosaurus\", \"sinosauropteryx\", \"sinraptor\", \"sinvenator\", \"zalmoxes\",", "\"austrosaurus\", \"avaceratops\", \"avalonia\", \"avimimus\", \"azendohsaurus\", \"bactrosaurus\", \"bagaceratops\", \"bambiraptor\", \"barapasaurus\", \"barosaurus\",", "\"micropachycephalosaurus\", \"microraptor\", \"minmi\", \"monolophosaurus\", \"mononykus\", \"mussaurus\", \"muttaburrasaurus\", \"nanotyrannus\", \"nanshiungosaurus\", \"nemegtosaurus\",", "\"eustreptospondylus\", \"fukuiraptor\", \"fukuisaurus\", \"gallimimus\", \"gargoyleosaurus\", \"garudimimus\", \"gasosaurus\", \"gasparinisaura\", \"gastonia\", \"giganotosaurus\",", "\"junket\", \"kitsch\", \"litany\", \"lurid\", \"malaise\", \"malinger\", \"mantra\", \"maudlin\", \"mercenary\", \"misnomer\",", "\"gargoyleosaurus\", \"garudimimus\", \"gasosaurus\", \"gasparinisaura\", \"gastonia\", \"giganotosaurus\", \"gilmoreosaurus\", \"giraffatitan\", \"gobisaurus\", \"gorgosaurus\",", "\"ammosaurus\", \"ampelosaurus\", \"amygdalodon\", \"anatotitan\", \"anchiceratops\", \"anchisaurus\", \"ankylosaurus\", \"anserimimus\", \"antarctopelta\", \"antarctosaurus\",", "\"Zorin\", \"elementaryOS\", \"macOS\", \"openSUSE\", ] FOLDERS = [ \"Development\", \"Downloads\",", "\"chubutisaurus\", \"chungkingosaurus\", \"citipati\", \"coelophysis\", \"coelurus\", \"coloradisaurus\", \"compsognathus\", \"conchoraptor\", \"confuciusornis\", \"corythosaurus\",", "\"caudipteryx\", \"cedarpelta\", \"centrosaurus\", \"ceratosaurus\", \"cetiosauriscus\", \"cetiosaurus\", \"chaoyangsaurus\", \"chasmosaurus\", \"chialingosaurus\", \"chindesaurus\",", "\"cacophony\", \"caustic\", \"charisma\", \"cloying\", \"deja-vu\", \"dichotomy\", \"elan\", \"ennui\", \"epitome\", \"esoteric\",", "\"misnomer\", \"nirvana\", \"oblivion\", \"ogle\", \"ostracize\", \"panacea\", \"paradox\", \"peevish\", \"propriety\", \"revel\",", "\"Lua\", \"Mathematica\", \"Objective-C\", \"OCaml\", \"Perl\", \"PHP\", \"PL-I\", 
\"PL-SQL\", \"PowerShell\", \"Prolog\",", "\"dubreuillosaurus\", \"edmontonia\", \"edmontosaurus\", \"einiosaurus\", \"elaphrosaurus\", \"emausaurus\", \"eolambia\", \"eoraptor\", \"eotyrannus\", \"equijubus\",", "\"aralosaurus\", \"archaeoceratops\", \"archaeopteryx\", \"archaeornithomimus\", \"argentinosaurus\", \"arrhinoceratops\", \"atlascopcosaurus\", \"aucasaurus\", \"austrosaurus\", \"avaceratops\",", "\"archaeoceratops\", \"archaeopteryx\", \"archaeornithomimus\", \"argentinosaurus\", \"arrhinoceratops\", \"atlascopcosaurus\", \"aucasaurus\", \"austrosaurus\", \"avaceratops\", \"avalonia\",", "\"anomaly\", \"antidote\", \"baroque\", \"bona_fide\", \"bourgeois\", \"bravado\", \"brogue\", \"brusque\", \"cacophony\", \"caustic\",", "\"rhabdodon\", \"rhoetosaurus\", \"rinchenia\", \"riojasaurus\", \"rugops\", \"saichania\", \"saltasaurus\", \"saltopus\", \"sarcosaurus\", \"saurolophus\",", "the data related to the development.\"\"\" LICENSES = [ \"Apache", "\"archaeornithomimus\", \"argentinosaurus\", \"arrhinoceratops\", \"atlascopcosaurus\", \"aucasaurus\", \"austrosaurus\", \"avaceratops\", \"avalonia\", \"avimimus\", \"azendohsaurus\",", "\"erlikosaurus\", \"euhelopus\", \"euoplocephalus\", \"europasaurus\", \"euskelosaurus\", \"eustreptospondylus\", \"fukuiraptor\", \"fukuisaurus\", \"gallimimus\", \"gargoyleosaurus\",", "\"riojasaurus\", \"rugops\", \"saichania\", \"saltasaurus\", \"saltopus\", \"sarcosaurus\", \"saurolophus\", \"sauropelta\", \"saurophaganax\", \"saurornithoides\",", "\"malawisaurus\", \"mamenchisaurus\", \"mapusaurus\", \"marshosaurus\", \"masiakasaurus\", \"massospondylus\", \"maxakalisaurus\", \"megalosaurus\", \"melanorosaurus\", \"metriacanthosaurus\",", "\"hypacrosaurus\", \"hypselosaurus\", \"hypsilophodon\", \"iguanodon\", \"indosuchus\", \"ingenia\", \"irritator\", \"isisaurus\", \"janenschia\", \"jaxartosaurus\",", "\"The BSD 2-Clause License\", \"GNU General Public License (GPL)\", \"General", "\"lufengosaurus\", 
\"lurdusaurus\", \"lycorhinus\", \"magyarosaurus\", \"maiasaura\", \"majungatholus\", \"malawisaurus\", \"mamenchisaurus\", \"mapusaurus\", \"marshosaurus\",", "\"haXe\", \"Io\", \"J#\", \"Java\", \"JavaScript\", \"Julia\", \"Kotlin\", \"Lisp\", \"Lua\", \"Mathematica\",", "\"oblivion\", \"ogle\", \"ostracize\", \"panacea\", \"paradox\", \"peevish\", \"propriety\", \"revel\", \"rhetoric\", \"spartan\",", "\"C\", \"C Shell\", \"C#\", \"C++\", \"Caml\", \"Ceylon\", \"Clojure\", \"CoffeeScript\", \"Common", "\"C#\", \"C++\", \"Caml\", \"Ceylon\", \"Clojure\", \"CoffeeScript\", \"Common Lisp\", \"D\", \"Dart\",", "\"Lubuntu\", \"Manjaro\", \"Mint\", \"OS X\", \"macOS\", \"OpenBSD\", \"PCLinuxOS\", \"Slackware\", \"Ubuntu\",", "\"gryposaurus\", \"guaibasaurus\", \"guanlong\", \"hadrosaurus\", \"hagryphus\", \"haplocanthosaurus\", \"harpymimus\", \"herrerasaurus\", \"hesperosaurus\", \"heterodontosaurus\",", "\"acrimony\", \"angst\", \"anomaly\", \"antidote\", \"baroque\", \"bona_fide\", \"bourgeois\", \"bravado\", \"brogue\", \"brusque\",", "\"achelousaurus\", \"achillobator\", \"acrocanthosaurus\", \"aegyptosaurus\", \"afrovenator\", \"agilisaurus\", \"alamosaurus\", \"albertaceratops\", \"albertosaurus\", \"alectrosaurus\",", "\"idyllic\", \"insidious\", \"junket\", \"kitsch\", \"litany\", \"lurid\", \"malaise\", \"malinger\", \"mantra\", \"maudlin\",", "\"C Shell\", \"C#\", \"C++\", \"Caml\", \"Ceylon\", \"Clojure\", \"CoffeeScript\", \"Common Lisp\",", "\"faux pas\", \"fiasco\", \"finagle\", \"glib\", \"harbinger\", \"hedonist\", \"heresy\", \"idyllic\", \"insidious\",", "\"nqwebasaurus\", \"omeisaurus\", \"ornitholestes\", \"ornithomimus\", \"orodromeus\", \"oryctodromeus\", \"othnielia\", \"ouranosaurus\", \"oviraptor\", \"rebbachisaurus\",", "\"jinzhousaurus\", \"jobaria\", \"juravenator\", \"kentrosaurus\", \"khaan\", \"kotasaurus\", \"kritosaurus\", \"lamaceratops\", \"lambeosaurus\", \"lapparentosaurus\",", "\"hypsilophodon\", \"iguanodon\", \"indosuchus\", 
\"ingenia\", \"irritator\", \"isisaurus\", \"janenschia\", \"jaxartosaurus\", \"jingshanosaurus\", \"jinzhousaurus\",", "(Apache-2.0)\", \"The BSD 3-Clause License\", \"The BSD 2-Clause License\", \"GNU", "\"PL-SQL\", \"PowerShell\", \"Prolog\", \"Python\", \"R\", \"Racket\", \"Ruby\", \"Rust\", \"Scala\", \"Scheme\",", "\"zalmoxes\", \"zephyrosaurus\", \"zuniceratops\", \"byzantine\", \"svengali\", \"accolade\", \"acrimony\", \"angst\", \"anomaly\", \"antidote\",", "(MIT)\", \"Mozilla Public License 2.0 (MPL-2.0)\", \"Common Development and Distribution", "] PROGRAMMING_LANGS = [ \"ASP\", \"Assembly\", \"AutoIt\", \"Awk\", \"Bash\", \"C\",", "\"metriacanthosaurus\", \"microceratops\", \"micropachycephalosaurus\", \"microraptor\", \"minmi\", \"monolophosaurus\", \"mononykus\", \"mussaurus\", \"muttaburrasaurus\", \"nanotyrannus\",", "\"charisma\", \"cloying\", \"deja-vu\", \"dichotomy\", \"elan\", \"ennui\", \"epitome\", \"esoteric\", \"euphemism\", \"faux", "Public License (LGPL)\", \"MIT License (MIT)\", \"Mozilla Public License 2.0", "Development and Distribution License (CDDL-1.0)\", \"Eclipse Public License (EPL-1.0)\", ]", "\"Delphi\", \"Dylan\", \"ECMAScript\", \"Elixir\", \"Emacs Lisp\", \"Erlang\", \"F#\", \"Falcon\", \"Fortran\",", "\"rhoetosaurus\", \"rinchenia\", \"riojasaurus\", \"rugops\", \"saichania\", \"saltasaurus\", \"saltopus\", \"sarcosaurus\", \"saurolophus\", \"sauropelta\",", "PROJECT_NAMES = [ \"aardonyx\", \"abelisaurus\", \"achelousaurus\", \"achillobator\", \"acrocanthosaurus\", \"aegyptosaurus\", \"afrovenator\",", "\"fukuiraptor\", \"fukuisaurus\", \"gallimimus\", \"gargoyleosaurus\", \"garudimimus\", \"gasosaurus\", \"gasparinisaura\", \"gastonia\", \"giganotosaurus\", \"gilmoreosaurus\",", "\"megalosaurus\", \"melanorosaurus\", \"metriacanthosaurus\", \"microceratops\", \"micropachycephalosaurus\", \"microraptor\", \"minmi\", \"monolophosaurus\", \"mononykus\", \"mussaurus\",", "\"ornitholestes\", \"ornithomimus\", \"orodromeus\", 
\"oryctodromeus\", \"othnielia\", \"ouranosaurus\", \"oviraptor\", \"rebbachisaurus\", \"rhabdodon\", \"rhoetosaurus\",", "\"cetiosaurus\", \"chaoyangsaurus\", \"chasmosaurus\", \"chialingosaurus\", \"chindesaurus\", \"chinshakiangosaurus\", \"chirostenotes\", \"chubutisaurus\", \"chungkingosaurus\", \"citipati\",", "\"Tex\", \"Transact-SQL\", \"TypeScript\", \"Z shell\", ] OS = [ \"Arch\",", "\"deltadromeus\", \"diceratops\", \"dicraeosaurus\", \"dilophosaurus\", \"diplodocus\", \"dracorex\", \"dravidosaurus\", \"dromaeosaurus\", \"dromiceiomimus\", \"dryosaurus\",", "2.0 (MPL-2.0)\", \"Common Development and Distribution License (CDDL-1.0)\", \"Eclipse Public", "<filename>mimesis/data/int/development.py \"\"\"Provides all the data related to the development.\"\"\" LICENSES", "\"europasaurus\", \"euskelosaurus\", \"eustreptospondylus\", \"fukuiraptor\", \"fukuisaurus\", \"gallimimus\", \"gargoyleosaurus\", \"garudimimus\", \"gasosaurus\", \"gasparinisaura\",", "\"ogle\", \"ostracize\", \"panacea\", \"paradox\", \"peevish\", \"propriety\", \"revel\", \"rhetoric\", \"spartan\", \"stigma\",", "\"diplodocus\", \"dracorex\", \"dravidosaurus\", \"dromaeosaurus\", \"dromiceiomimus\", \"dryosaurus\", \"dryptosaurus\", \"dubreuillosaurus\", \"edmontonia\", \"edmontosaurus\",", "\"Debian\", \"Fedora\", \"FreeBSD\", \"Gentoo\", \"Kali\", \"Lubuntu\", \"Manjaro\", \"Mint\", \"OS X\",", "\"jobaria\", \"juravenator\", \"kentrosaurus\", \"khaan\", \"kotasaurus\", \"kritosaurus\", \"lamaceratops\", \"lambeosaurus\", \"lapparentosaurus\", \"leaellynasaura\",", "\"apatosaurus\", \"aragosaurus\", \"aralosaurus\", \"archaeoceratops\", \"archaeopteryx\", \"archaeornithomimus\", \"argentinosaurus\", \"arrhinoceratops\", \"atlascopcosaurus\", \"aucasaurus\",", "\"OS X\", \"macOS\", \"OpenBSD\", \"PCLinuxOS\", \"Slackware\", \"Ubuntu\", \"Windows 10\", \"Windows", "\"homalocephale\", \"huayangosaurus\", \"hylaeosaurus\", \"hypacrosaurus\", \"hypselosaurus\", \"hypsilophodon\", 
\"iguanodon\", \"indosuchus\", \"ingenia\", \"irritator\",", "\"CoffeeScript\", \"Common Lisp\", \"D\", \"Dart\", \"Delphi\", \"Dylan\", \"ECMAScript\", \"Elixir\", \"Emacs", "[ \"Arch\", \"CentOS\", \"Debian\", \"Fedora\", \"FreeBSD\", \"Gentoo\", \"Kali\", \"Lubuntu\", \"Manjaro\",", "\"Smalltalk\", \"Tcl\", \"Tex\", \"Transact-SQL\", \"TypeScript\", \"Z shell\", ] OS =", "\"buitreraptor\", \"camarasaurus\", \"camptosaurus\", \"carnotaurus\", \"caudipteryx\", \"cedarpelta\", \"centrosaurus\", \"ceratosaurus\", \"cetiosauriscus\", \"cetiosaurus\",", "\"PL-I\", \"PL-SQL\", \"PowerShell\", \"Prolog\", \"Python\", \"R\", \"Racket\", \"Ruby\", \"Rust\", \"Scala\",", "\"shuvuuia\", \"silvisaurus\", \"sinocalliopteryx\", \"sinornithosaurus\", \"sinosauropteryx\", \"sinraptor\", \"sinvenator\", \"zalmoxes\", \"zephyrosaurus\", \"zuniceratops\",", "\"neuquenosaurus\", \"nigersaurus\", \"nipponosaurus\", \"noasaurus\", \"nodosaurus\", \"nomingia\", \"nothronychus\", \"nqwebasaurus\", \"omeisaurus\", \"ornitholestes\",", "\"baryonyx\", \"becklespinax\", \"beipiaosaurus\", \"bellusaurus\", \"borogovia\", \"brachiosaurus\", \"brachyceratops\", \"bugenasaura\", \"buitreraptor\", \"camarasaurus\",", "\"rugops\", \"saichania\", \"saltasaurus\", \"saltopus\", \"sarcosaurus\", \"saurolophus\", \"sauropelta\", \"saurophaganax\", \"saurornithoides\", \"scelidosaurus\",", "\"Scheme\", \"Smalltalk\", \"Tcl\", \"Tex\", \"Transact-SQL\", \"TypeScript\", \"Z shell\", ] OS", "and Distribution License (CDDL-1.0)\", \"Eclipse Public License (EPL-1.0)\", ] PROGRAMMING_LANGS", "\"nipponosaurus\", \"noasaurus\", \"nodosaurus\", \"nomingia\", \"nothronychus\", \"nqwebasaurus\", \"omeisaurus\", \"ornitholestes\", \"ornithomimus\", \"orodromeus\",", "\"bravado\", \"brogue\", \"brusque\", \"cacophony\", \"caustic\", \"charisma\", \"cloying\", \"deja-vu\", \"dichotomy\", \"elan\",", "\"lexovisaurus\", \"liaoceratops\", \"liaoxiornis\", \"ligabuesaurus\", \"liliensternus\", \"lophorhothon\", 
\"lophostropheus\", \"lufengosaurus\", \"lurdusaurus\", \"lycorhinus\",", "\"agilisaurus\", \"alamosaurus\", \"albertaceratops\", \"albertosaurus\", \"alectrosaurus\", \"alioramus\", \"allosaurus\", \"alvarezsaurus\", \"amargasaurus\", \"ammosaurus\",", "\"scelidosaurus\", \"scutellosaurus\", \"secernosaurus\", \"segisaurus\", \"segnosaurus\", \"seismosaurus\", \"shamosaurus\", \"shanag\", \"shantungosaurus\", \"shunosaurus\",", "\"barosaurus\", \"baryonyx\", \"becklespinax\", \"beipiaosaurus\", \"bellusaurus\", \"borogovia\", \"brachiosaurus\", \"brachyceratops\", \"bugenasaura\", \"buitreraptor\",", "3-Clause License\", \"The BSD 2-Clause License\", \"GNU General Public License", "related to the development.\"\"\" LICENSES = [ \"Apache License, 2.0", "\"antidote\", \"baroque\", \"bona_fide\", \"bourgeois\", \"bravado\", \"brogue\", \"brusque\", \"cacophony\", \"caustic\", \"charisma\",", "\"daspletosaurus\", \"datousaurus\", \"deinocheirus\", \"deinonychus\", \"deltadromeus\", \"diceratops\", \"dicraeosaurus\", \"dilophosaurus\", \"diplodocus\", \"dracorex\",", "10\", \"Windows 7\", \"Windows 8\", \"Windows 8.1\", \"Zorin\", \"elementaryOS\", \"macOS\",", "License (CDDL-1.0)\", \"Eclipse Public License (EPL-1.0)\", ] PROGRAMMING_LANGS = [", "\"iguanodon\", \"indosuchus\", \"ingenia\", \"irritator\", \"isisaurus\", \"janenschia\", \"jaxartosaurus\", \"jingshanosaurus\", \"jinzhousaurus\", \"jobaria\",", "\"accolade\", \"acrimony\", \"angst\", \"anomaly\", \"antidote\", \"baroque\", \"bona_fide\", \"bourgeois\", \"bravado\", \"brogue\",", "\"Windows 8.1\", \"Zorin\", \"elementaryOS\", \"macOS\", \"openSUSE\", ] FOLDERS = [", "\"baroque\", \"bona_fide\", \"bourgeois\", \"bravado\", \"brogue\", \"brusque\", \"cacophony\", \"caustic\", \"charisma\", \"cloying\",", "License\", \"The BSD 2-Clause License\", \"GNU General Public License (GPL)\",", "\"dryptosaurus\", \"dubreuillosaurus\", \"edmontonia\", \"edmontosaurus\", \"einiosaurus\", \"elaphrosaurus\", 
\"emausaurus\", \"eolambia\", \"eoraptor\", \"eotyrannus\",", "\"dromaeosaurus\", \"dromiceiomimus\", \"dryosaurus\", \"dryptosaurus\", \"dubreuillosaurus\", \"edmontonia\", \"edmontosaurus\", \"einiosaurus\", \"elaphrosaurus\", \"emausaurus\",", "\"Music\", \"Video\", \"Work\", \"Pictures\", \"Desktop\", \"Study\", ] PROJECT_NAMES = [" ]
[ "might be useful. It could, however, be not useful if", "\"\"\" A preliminary attempt at parsing an RST file's math", "be useful. It could, however, be not useful if there's", "math syntax in order to make math render as inline", "a pandoc option for converting .md to .rst that makes", "as of yet but might be useful. It could, however,", "be not useful if there's a pandoc option for converting", "converting .md to .rst that makes math inline and not", "order to make math render as inline rather than display", "math render as inline rather than display mode. This doesn't", "useful. It could, however, be not useful if there's a", "s = \"\"\"Define .. math:: v_{des} as the desired velocity,", "to .rst that makes math inline and not display. Keeping", "This doesn't work as of yet but might be useful.", "mode. This doesn't work as of yet but might be", "= \"\"\"Define .. math:: v_{des} as the desired velocity, ..", "preliminary attempt at parsing an RST file's math syntax in", "and not display. Keeping it around, though. \"\"\" import re", "rather than display mode. This doesn't work as of yet", "there's a pandoc option for converting .md to .rst that", "could, however, be not useful if there's a pandoc option", ".. math:: v_{des} as the desired velocity, .. math:: 1^k", "length\"\"\" with open('/Users/nishant/Downloads/tutorialtest.rst', 'r') as myfile: s = myfile.read() print([elem[11:-2]", "useful if there's a pandoc option for converting .md to", "inline rather than display mode. This doesn't work as of", "file's math syntax in order to make math render as", "of ones of length\"\"\" with open('/Users/nishant/Downloads/tutorialtest.rst', 'r') as myfile: s", "A preliminary attempt at parsing an RST file's math syntax", "\"\"\"Define .. math:: v_{des} as the desired velocity, .. math::", "velocity, .. math:: 1^k a vector of ones of length\"\"\"", "inline and not display. Keeping it around, though. 
\"\"\" import", "option for converting .md to .rst that makes math inline", ".rst that makes math inline and not display. Keeping it", "not display. Keeping it around, though. \"\"\" import re s", "math:: v_{des} as the desired velocity, .. math:: 1^k a", "display. Keeping it around, though. \"\"\" import re s =", "an RST file's math syntax in order to make math", "of yet but might be useful. It could, however, be", "however, be not useful if there's a pandoc option for", "syntax in order to make math render as inline rather", "yet but might be useful. It could, however, be not", "vector of ones of length\"\"\" with open('/Users/nishant/Downloads/tutorialtest.rst', 'r') as myfile:", "\"\"\" import re s = \"\"\"Define .. math:: v_{des} as", "not useful if there's a pandoc option for converting .md", "make math render as inline rather than display mode. This", "Keeping it around, though. \"\"\" import re s = \"\"\"Define", ".md to .rst that makes math inline and not display.", "re s = \"\"\"Define .. math:: v_{des} as the desired", "open('/Users/nishant/Downloads/tutorialtest.rst', 'r') as myfile: s = myfile.read() print([elem[11:-2] for elem", "around, though. \"\"\" import re s = \"\"\"Define .. math::", "as inline rather than display mode. This doesn't work as", "1^k a vector of ones of length\"\"\" with open('/Users/nishant/Downloads/tutorialtest.rst', 'r')", "myfile: s = myfile.read() print([elem[11:-2] for elem in re.findall('\\n.. math::", "it around, though. \"\"\" import re s = \"\"\"Define ..", "s = myfile.read() print([elem[11:-2] for elem in re.findall('\\n.. math:: *\\S*\\n\\n',", "pandoc option for converting .md to .rst that makes math", "render as inline rather than display mode. This doesn't work", "a vector of ones of length\"\"\" with open('/Users/nishant/Downloads/tutorialtest.rst', 'r') as", "desired velocity, .. math:: 1^k a vector of ones of", "for converting .md to .rst that makes math inline and", "makes math inline and not display. 
Keeping it around, though.", "It could, however, be not useful if there's a pandoc", "attempt at parsing an RST file's math syntax in order", "with open('/Users/nishant/Downloads/tutorialtest.rst', 'r') as myfile: s = myfile.read() print([elem[11:-2] for", "that makes math inline and not display. Keeping it around,", "of length\"\"\" with open('/Users/nishant/Downloads/tutorialtest.rst', 'r') as myfile: s = myfile.read()", "the desired velocity, .. math:: 1^k a vector of ones", "in order to make math render as inline rather than", "work as of yet but might be useful. It could,", "than display mode. This doesn't work as of yet but", "at parsing an RST file's math syntax in order to", ".. math:: 1^k a vector of ones of length\"\"\" with", "= myfile.read() print([elem[11:-2] for elem in re.findall('\\n.. math:: *\\S*\\n\\n', s)])", "math:: 1^k a vector of ones of length\"\"\" with open('/Users/nishant/Downloads/tutorialtest.rst',", "but might be useful. It could, however, be not useful", "RST file's math syntax in order to make math render", "ones of length\"\"\" with open('/Users/nishant/Downloads/tutorialtest.rst', 'r') as myfile: s =", "as myfile: s = myfile.read() print([elem[11:-2] for elem in re.findall('\\n..", "'r') as myfile: s = myfile.read() print([elem[11:-2] for elem in", "math inline and not display. Keeping it around, though. \"\"\"", "<filename>docs/mathparse.py \"\"\" A preliminary attempt at parsing an RST file's", "doesn't work as of yet but might be useful. It", "import re s = \"\"\"Define .. math:: v_{des} as the", "though. \"\"\" import re s = \"\"\"Define .. math:: v_{des}", "parsing an RST file's math syntax in order to make", "if there's a pandoc option for converting .md to .rst", "v_{des} as the desired velocity, .. math:: 1^k a vector", "to make math render as inline rather than display mode.", "display mode. This doesn't work as of yet but might", "as the desired velocity, .. math:: 1^k a vector of" ]
[ "RoundRect(Shape): __slots__ = ('fillcolor', 'radius') def __init__(self, name, fillcolor, radius,", "seq def draw(self, w, h): g = self.graphics g.clear() for", "width) self.addChild(self.textline) @package('layout') class CenteredLine(TextLine): def __init__(self, format, text, name,", "fillcolor, radius, states): super().__init__(name, states) self.fillcolor = fillcolor self.radius =", "= text super().__init__(name, states) def draw(self, width, height): if self.textline:", "flash.text.engine import TextBlock, TextElement @package('layout') class Poly(Shape): __slots__ = ('fillcolor',", "super().__init__(name, states) def draw(self, width, height): if self.textline: self.removeChild(self.textline) tb", "int(line[idx][1]*h)) g.endFill() @package('layout') class RoundRect(Shape): __slots__ = ('fillcolor', 'radius') def", "def __init__(self, name, fillcolor, seq, states): super().__init__(name, states) self.fillcolor =", "g.moveTo(int(line[0][0]*w), int(line[0][1]*h)) for idx in range(1, line.length): g.lineTo(int(line[idx][0]*w), int(line[idx][1]*h)) g.endFill()", "__init__(self, format, text, name, states): super().__init__(format, text, name, states) def", "= ('format', 'text', 'textline') def __init__(self, format, text, name, states):", "TextElement(self.text, self.format) self.textline = tb.createTextLine(None, width) self.addChild(self.textline) @package('layout') class CenteredLine(TextLine):", "states): self.format = format self.text = text super().__init__(name, states) def", "super().draw(width, height) self.textline.x = int((width - self.textline.width)/2) self.textline.y = int((height", "radius def draw(self, width, height): g = self.graphics g.clear() g.beginFill(self.fillcolor)", "from layout import Shape, Widget from flash.text.engine import TextBlock, TextElement", "text super().__init__(name, states) def draw(self, width, height): if self.textline: self.removeChild(self.textline)", "g.endFill() @package('layout') class TextLine(Widget): 
__slots__ = ('format', 'text', 'textline') def", "g.drawRoundRect(0, 0, width, height, self.radius, self.radius) g.endFill() @package('layout') class TextLine(Widget):", "states) def draw(self, width, height): if self.textline: self.removeChild(self.textline) tb =", "self.radius, self.radius) g.endFill() @package('layout') class TextLine(Widget): __slots__ = ('format', 'text',", "def __init__(self, format, text, name, states): super().__init__(format, text, name, states)", "g.endFill() @package('layout') class RoundRect(Shape): __slots__ = ('fillcolor', 'radius') def __init__(self,", "in values(self.sequence): g.beginFill(self.fillcolor) g.moveTo(int(line[0][0]*w), int(line[0][1]*h)) for idx in range(1, line.length):", "self.fillcolor = fillcolor self.radius = radius def draw(self, width, height):", "range(1, line.length): g.lineTo(int(line[idx][0]*w), int(line[idx][1]*h)) g.endFill() @package('layout') class RoundRect(Shape): __slots__ =", "states) self.fillcolor = fillcolor self.sequence = seq def draw(self, w,", "@package('layout') class Poly(Shape): __slots__ = ('fillcolor', 'sequence') def __init__(self, name,", "Poly(Shape): __slots__ = ('fillcolor', 'sequence') def __init__(self, name, fillcolor, seq,", "self.radius) g.endFill() @package('layout') class TextLine(Widget): __slots__ = ('format', 'text', 'textline')", "name, fillcolor, seq, states): super().__init__(name, states) self.fillcolor = fillcolor self.sequence", "super().__init__(format, text, name, states) def draw(self, width, height): super().draw(width, height)", "g.beginFill(self.fillcolor) g.drawRoundRect(0, 0, width, height, self.radius, self.radius) g.endFill() @package('layout') class", "self.graphics g.clear() for line in values(self.sequence): g.beginFill(self.fillcolor) g.moveTo(int(line[0][0]*w), int(line[0][1]*h)) for", "Shape, Widget from flash.text.engine import TextBlock, TextElement @package('layout') class Poly(Shape):", "import TextBlock, TextElement @package('layout') class 
Poly(Shape): __slots__ = ('fillcolor', 'sequence')", "= TextElement(self.text, self.format) self.textline = tb.createTextLine(None, width) self.addChild(self.textline) @package('layout') class", "super().__init__(name, states) self.fillcolor = fillcolor self.sequence = seq def draw(self,", "self.textline = tb.createTextLine(None, width) self.addChild(self.textline) @package('layout') class CenteredLine(TextLine): def __init__(self,", "self.format) self.textline = tb.createTextLine(None, width) self.addChild(self.textline) @package('layout') class CenteredLine(TextLine): def", "g = self.graphics g.clear() g.beginFill(self.fillcolor) g.drawRoundRect(0, 0, width, height, self.radius,", "= tb.createTextLine(None, width) self.addChild(self.textline) @package('layout') class CenteredLine(TextLine): def __init__(self, format,", "TextLine(Widget): __slots__ = ('format', 'text', 'textline') def __init__(self, format, text,", "format, text, name, states): self.format = format self.text = text", "self.graphics g.clear() g.beginFill(self.fillcolor) g.drawRoundRect(0, 0, width, height, self.radius, self.radius) g.endFill()", "states): super().__init__(name, states) self.fillcolor = fillcolor self.sequence = seq def", "'text', 'textline') def __init__(self, format, text, name, states): self.format =", "g.clear() for line in values(self.sequence): g.beginFill(self.fillcolor) g.moveTo(int(line[0][0]*w), int(line[0][1]*h)) for idx", "Widget from flash.text.engine import TextBlock, TextElement @package('layout') class Poly(Shape): __slots__", "g.beginFill(self.fillcolor) g.moveTo(int(line[0][0]*w), int(line[0][1]*h)) for idx in range(1, line.length): g.lineTo(int(line[idx][0]*w), int(line[idx][1]*h))", "tb.createTextLine(None, width) self.addChild(self.textline) @package('layout') class CenteredLine(TextLine): def __init__(self, format, text,", "text, name, states): self.format = format self.text = text super().__init__(name,", "TextBlock, TextElement @package('layout') class 
Poly(Shape): __slots__ = ('fillcolor', 'sequence') def", "('fillcolor', 'radius') def __init__(self, name, fillcolor, radius, states): super().__init__(name, states)", "def __init__(self, name, fillcolor, radius, states): super().__init__(name, states) self.fillcolor =", "self.addChild(self.textline) @package('layout') class CenteredLine(TextLine): def __init__(self, format, text, name, states):", "fillcolor self.radius = radius def draw(self, width, height): g =", "tb.content = TextElement(self.text, self.format) self.textline = tb.createTextLine(None, width) self.addChild(self.textline) @package('layout')", "height): if self.textline: self.removeChild(self.textline) tb = TextBlock() tb.content = TextElement(self.text,", "states): super().__init__(format, text, name, states) def draw(self, width, height): super().draw(width,", "__init__(self, name, fillcolor, seq, states): super().__init__(name, states) self.fillcolor = fillcolor", "@package('layout') class TextLine(Widget): __slots__ = ('format', 'text', 'textline') def __init__(self,", "TextElement @package('layout') class Poly(Shape): __slots__ = ('fillcolor', 'sequence') def __init__(self,", "seq, states): super().__init__(name, states) self.fillcolor = fillcolor self.sequence = seq", "g = self.graphics g.clear() for line in values(self.sequence): g.beginFill(self.fillcolor) g.moveTo(int(line[0][0]*w),", "= self.graphics g.clear() for line in values(self.sequence): g.beginFill(self.fillcolor) g.moveTo(int(line[0][0]*w), int(line[0][1]*h))", "self.text = text super().__init__(name, states) def draw(self, width, height): if", "@package('layout') class RoundRect(Shape): __slots__ = ('fillcolor', 'radius') def __init__(self, name,", "height) self.textline.x = int((width - self.textline.width)/2) self.textline.y = int((height -", "__slots__ = ('fillcolor', 'radius') def __init__(self, name, fillcolor, radius, states):", "class Poly(Shape): __slots__ = ('fillcolor', 'sequence') def __init__(self, name, fillcolor,", 
"in range(1, line.length): g.lineTo(int(line[idx][0]*w), int(line[idx][1]*h)) g.endFill() @package('layout') class RoundRect(Shape): __slots__", "def draw(self, width, height): g = self.graphics g.clear() g.beginFill(self.fillcolor) g.drawRoundRect(0,", "= radius def draw(self, width, height): g = self.graphics g.clear()", "self.format = format self.text = text super().__init__(name, states) def draw(self,", "width, height): super().draw(width, height) self.textline.x = int((width - self.textline.width)/2) self.textline.y", "for idx in range(1, line.length): g.lineTo(int(line[idx][0]*w), int(line[idx][1]*h)) g.endFill() @package('layout') class", "self.textline: self.removeChild(self.textline) tb = TextBlock() tb.content = TextElement(self.text, self.format) self.textline", "width, height): if self.textline: self.removeChild(self.textline) tb = TextBlock() tb.content =", "= ('fillcolor', 'radius') def __init__(self, name, fillcolor, radius, states): super().__init__(name,", "self.sequence = seq def draw(self, w, h): g = self.graphics", "for line in values(self.sequence): g.beginFill(self.fillcolor) g.moveTo(int(line[0][0]*w), int(line[0][1]*h)) for idx in", "def __init__(self, format, text, name, states): self.format = format self.text", "import Shape, Widget from flash.text.engine import TextBlock, TextElement @package('layout') class", "format, text, name, states): super().__init__(format, text, name, states) def draw(self,", "self.removeChild(self.textline) tb = TextBlock() tb.content = TextElement(self.text, self.format) self.textline =", "g.lineTo(int(line[idx][0]*w), int(line[idx][1]*h)) g.endFill() @package('layout') class RoundRect(Shape): __slots__ = ('fillcolor', 'radius')", "draw(self, width, height): super().draw(width, height) self.textline.x = int((width - self.textline.width)/2)", "fillcolor self.sequence = seq def draw(self, w, h): g =", "= fillcolor self.sequence = seq def draw(self, w, h): g", "class TextLine(Widget): __slots__ = ('format', 
'text', 'textline') def __init__(self, format,", "int(line[0][1]*h)) for idx in range(1, line.length): g.lineTo(int(line[idx][0]*w), int(line[idx][1]*h)) g.endFill() @package('layout')", "height): g = self.graphics g.clear() g.beginFill(self.fillcolor) g.drawRoundRect(0, 0, width, height,", "def draw(self, width, height): if self.textline: self.removeChild(self.textline) tb = TextBlock()", "= ('fillcolor', 'sequence') def __init__(self, name, fillcolor, seq, states): super().__init__(name,", "layout import Shape, Widget from flash.text.engine import TextBlock, TextElement @package('layout')", "__slots__ = ('format', 'text', 'textline') def __init__(self, format, text, name,", "('format', 'text', 'textline') def __init__(self, format, text, name, states): self.format", "('fillcolor', 'sequence') def __init__(self, name, fillcolor, seq, states): super().__init__(name, states)", "'sequence') def __init__(self, name, fillcolor, seq, states): super().__init__(name, states) self.fillcolor", "0, width, height, self.radius, self.radius) g.endFill() @package('layout') class TextLine(Widget): __slots__", "@package('layout') class CenteredLine(TextLine): def __init__(self, format, text, name, states): super().__init__(format,", "__slots__ = ('fillcolor', 'sequence') def __init__(self, name, fillcolor, seq, states):", "def draw(self, w, h): g = self.graphics g.clear() for line", "= TextBlock() tb.content = TextElement(self.text, self.format) self.textline = tb.createTextLine(None, width)", "CenteredLine(TextLine): def __init__(self, format, text, name, states): super().__init__(format, text, name,", "if self.textline: self.removeChild(self.textline) tb = TextBlock() tb.content = TextElement(self.text, self.format)", "w, h): g = self.graphics g.clear() for line in values(self.sequence):", "self.fillcolor = fillcolor self.sequence = seq def draw(self, w, h):", "format self.text = text super().__init__(name, states) def draw(self, width, height):", "draw(self, width, height): if 
self.textline: self.removeChild(self.textline) tb = TextBlock() tb.content", "self.radius = radius def draw(self, width, height): g = self.graphics", "tb = TextBlock() tb.content = TextElement(self.text, self.format) self.textline = tb.createTextLine(None,", "radius, states): super().__init__(name, states) self.fillcolor = fillcolor self.radius = radius", "text, name, states): super().__init__(format, text, name, states) def draw(self, width,", "= seq def draw(self, w, h): g = self.graphics g.clear()", "'textline') def __init__(self, format, text, name, states): self.format = format", "__init__(self, format, text, name, states): self.format = format self.text =", "'radius') def __init__(self, name, fillcolor, radius, states): super().__init__(name, states) self.fillcolor", "self.textline.x = int((width - self.textline.width)/2) self.textline.y = int((height - self.textline.height)/2)", "text, name, states) def draw(self, width, height): super().draw(width, height) self.textline.x", "fillcolor, seq, states): super().__init__(name, states) self.fillcolor = fillcolor self.sequence =", "states) self.fillcolor = fillcolor self.radius = radius def draw(self, width,", "def draw(self, width, height): super().draw(width, height) self.textline.x = int((width -", "super().__init__(name, states) self.fillcolor = fillcolor self.radius = radius def draw(self,", "name, states): super().__init__(format, text, name, states) def draw(self, width, height):", "height, self.radius, self.radius) g.endFill() @package('layout') class TextLine(Widget): __slots__ = ('format',", "height): super().draw(width, height) self.textline.x = int((width - self.textline.width)/2) self.textline.y =", "width, height): g = self.graphics g.clear() g.beginFill(self.fillcolor) g.drawRoundRect(0, 0, width,", "from flash.text.engine import TextBlock, TextElement @package('layout') class Poly(Shape): __slots__ =", "= fillcolor self.radius = radius def draw(self, width, height): g", "name, states): self.format 
= format self.text = text super().__init__(name, states)", "values(self.sequence): g.beginFill(self.fillcolor) g.moveTo(int(line[0][0]*w), int(line[0][1]*h)) for idx in range(1, line.length): g.lineTo(int(line[idx][0]*w),", "line in values(self.sequence): g.beginFill(self.fillcolor) g.moveTo(int(line[0][0]*w), int(line[0][1]*h)) for idx in range(1,", "__init__(self, name, fillcolor, radius, states): super().__init__(name, states) self.fillcolor = fillcolor", "class RoundRect(Shape): __slots__ = ('fillcolor', 'radius') def __init__(self, name, fillcolor,", "= format self.text = text super().__init__(name, states) def draw(self, width,", "g.clear() g.beginFill(self.fillcolor) g.drawRoundRect(0, 0, width, height, self.radius, self.radius) g.endFill() @package('layout')", "states): super().__init__(name, states) self.fillcolor = fillcolor self.radius = radius def", "= self.graphics g.clear() g.beginFill(self.fillcolor) g.drawRoundRect(0, 0, width, height, self.radius, self.radius)", "name, fillcolor, radius, states): super().__init__(name, states) self.fillcolor = fillcolor self.radius", "TextBlock() tb.content = TextElement(self.text, self.format) self.textline = tb.createTextLine(None, width) self.addChild(self.textline)", "h): g = self.graphics g.clear() for line in values(self.sequence): g.beginFill(self.fillcolor)", "line.length): g.lineTo(int(line[idx][0]*w), int(line[idx][1]*h)) g.endFill() @package('layout') class RoundRect(Shape): __slots__ = ('fillcolor',", "idx in range(1, line.length): g.lineTo(int(line[idx][0]*w), int(line[idx][1]*h)) g.endFill() @package('layout') class RoundRect(Shape):", "draw(self, width, height): g = self.graphics g.clear() g.beginFill(self.fillcolor) g.drawRoundRect(0, 0,", "states) def draw(self, width, height): super().draw(width, height) self.textline.x = int((width", "width, height, self.radius, self.radius) g.endFill() @package('layout') class TextLine(Widget): __slots__ =", "draw(self, w, h): g = self.graphics g.clear() for 
line in", "class CenteredLine(TextLine): def __init__(self, format, text, name, states): super().__init__(format, text,", "name, states) def draw(self, width, height): super().draw(width, height) self.textline.x =" ]
[ "particular path is the one we should already have a", "self.send_response(302) self.send_header('location', \"bad-2\") self.end_headers() elif self.path == \"/redirect/bad-3\": self.send_response(302) self.send_header('location',", "\"/filename_mime/content-disposition-quotes-1\": self.send_response(200) self.send_header('Content-Disposition', \"filename='lolercoaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-2\":", "self.send_header('location', \"to-1\") self.end_headers() elif self.path == \"/redirect/to-1\": self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-1\")", "\"/filename/path-only-trailing-slash/\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename/content-disposition\": self.send_response(200) self.send_header('Content-Disposition',", "= start_server( assertion_class = None, from_wg = wg, skip_header_checks =", "self.path == \"/json/no-coding\": self.send_response(200) self.end_headers() self.wfile.write(b'{\"oh\" : \"hai\"}') elif self.path", "elif self.path == \"/binary_ctnt\": self.send_response(200) self.send_header('Content-type', \"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") ##################################################################################################################################", "self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path ==", "+ datetime.timedelta(days=30) cook['cookie_test_key'][\"expires\"] = expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S PST\") self.send_response(200) self.send_header('Content-type',", "== \"/html-decode\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path", 
"self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target Sucuri page!</title></head><body>Sucuri Redirected OK?</body></html>\") return", "a dict. Passed a %s\" & type(expected_headers) for key, val", "print(\"Path: \", self.path) # print(\"Headers: \", self.headers) # print(\"Cookie(s): \",", "log.error(\"Exception in handler!\") for line in traceback.format_exc().split(\"\\n\"): log.error(line) raise e", "elif self.path == '/cloudflare_under_attack_shit_2': if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie',", "== \"/raw-txt\": self.send_response(200) self.send_header('Content-type', \"text/plain\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path", "assertion_class = None, from_wg = wg, skip_header_checks = True) print(\"running", "self.send_response(200) self.send_header('Content-Disposition', \"filename=\\'lolercoaster.html\\'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-spaces-1\": self.send_response(200)", "print(\"Fetch for \", self.path) # print(\"Cookies:\", self.headers.get_all('Cookie', failobj=[])) if self.headers.get_all('Cookie',", "= fp.read() self.server_version = \"cloudflare is garbage\" self.send_response(503) self.send_header('Server', \"cloudflare", "newurl) self.end_headers() elif self.path == \"/password/expect\": # print(\"Password\") # print(self.headers)", "b\"Root OK?\" cobj = zlib.compressobj(wbits=-zlib.MAX_WBITS) t1 = cobj.compress(inb) + cobj.flush()", "path is the one we should already have a cookie", "return a response with an HTTP 200 status. log.info(\"Request for", "to just 2 requests (cookie bounce, and fetch). 
# Doing", "= cobj.compress(inb) + cobj.flush() self.wfile.write(t1) elif self.path == \"/compressed/gzip\": self.send_response(200)", "self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-2\") elif self.path == \"/redirect/from-3\": self.send_response(302) newurl =", "\"/\" cook['cookie_test_key']['domain'] = \"\" expiration = datetime.datetime.now() + datetime.timedelta(days=30) cook['cookie_test_key'][\"expires\"]", "value in expected_headers.items(): if (is_annoying_pjs or is_selenium_garbage_chromium or skip_header_checks) and", "PST\") self.send_response(200) self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie', cook['cloudflare_validate_key'].OutputString()) self.end_headers() body = \"<html><body>Setting", "return MockServerRequestHandler def get_free_port(): s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM) s.bind(('localhost', 0))", "= logging.getLogger(\"Main.TestServer\") sucuri_reqs_1 = 0 sucuri_reqs_2 = 0 sucuri_reqs_3 =", "for line in traceback.format_exc().split(\"\\n\"): log.error(line) raise e return MockServerRequestHandler def", "= os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\", 'sucuri_garbage.html') with open(fpath, \"rb\")", "== \"/password/expect\": # print(\"Password\") # print(self.headers) self.send_response(200) self.end_headers() if not", "= datetime.datetime.now() + datetime.timedelta(days=30) cook['cookie_test_key'][\"expires\"] = expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S PST\")", "port_override: mock_server_port = port_override else: mock_server_port = get_free_port() expected_headers =", "raise RuntimeError(\"Too many requests to sucuri_shit (%s)!\" % sucuri_reqs_1) #", "container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\", 'cf_js_challenge_03_12_2018.html') with open(fpath,", "one we should already have a cookie for. 
# As", "== 'Accept': pass elif not skip_header_checks: v1 = value.replace(\" \",", "elif self.path == \"/password/expect\": # print(\"Password\") # print(self.headers) self.send_response(200) self.end_headers()", "_get_handler(self): # Process an HTTP GET request and return a", "== \"/filename_mime/content-disposition-quotes-spaces-1\": self.send_response(200) self.send_header('Content-Disposition', \"filename='loler coaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path", "self.end_headers() elif self.path == \"/redirect/to-1\": self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-1\") elif self.path", "= base64.b64decode(passval) if passstr == b'lol:<PASSWORD>': self.wfile.write(b\"Password Ok?\") else: self.wfile.write(b\"Password", "\"/favicon.ico\": self.send_response(404) self.end_headers() elif self.path == \"/raw-txt\": self.send_response(200) self.send_header('Content-type', \"text/plain\")", "an unknown path ################################################################################################################################## else: test_context.assertEqual(self.path, \"This shouldn't happen!\") def", "mock server in a separate thread. 
# Daemon threads automatically", "self.end_headers() if not 'Authorization' in self.headers: self.wfile.write(b\"Password not sent!!\") return", "'/cloudflare_under_attack_shit_2': if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key, cook_value", "if self.path == \"/\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Root OK?\")", "self.send_response(200) self.send_header('Content-type', \"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") ################################################################################################################################## # Cookie stuff ##################################################################################################################################", "4 for x in range(retries + 1): try: mock_server =", "Sadface.jpg</body></html>\") elif self.path == \"/binary_ctnt\": self.send_response(200) self.send_header('Content-type', \"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\")", "cook_value == '04cbb56494ebedbcd19a61b2d728c478': # if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers()", "\"/filename_mime/content-disposition-quotes-spaces-2\": self.send_response(200) self.send_header('Content-Disposition', \"filename=\\\"loler coaster.html\\\"\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path ==", "at all. elif (is_selenium_garbage_chromium or skip_header_checks) and key == 'Accept-Language':", "and cook_value == cookie_key: self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie forwarded", "expected headers:\") # print(expected_headers) assert isinstance(expected_headers, dict), \"expected_headers must be", "be a dict. 
Passed a %s\" & type(expected_headers) for key,", "failobj=[])[0] cook_key, cook_value = cook.split(\"=\", 1) if cook_key == 'cookie_test_key'", "fp.read() self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) ################################################################################################################################## # Cloudflare validation", "value.replace(\" \", \"\") v2 = self.headers[key] if v2 is None:", "################################################################################################################################## # Cloudflare validation ################################################################################################################################## elif self.path == '/cloudflare_under_attack_shit_2': if", "def do_GET(self): # Process an HTTP GET request and return", "# print(\"Cookie(s): \", self.headers.get_all('Cookie', failobj=[])) try: return self._get_handler() except Exception", "{ 'is_annoying_pjs' : is_annoying_pjs, 'is_chromium' : is_chromium, 'is_selenium_garbage_chromium' : is_selenium_garbage_chromium,", "self.wfile.write(plain_contents) ################################################################################################################################## # Cloudflare validation ################################################################################################################################## elif self.path == '/cloudflare_under_attack_shit_2':", "self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path == \"/html-decode\": self.send_response(200) self.send_header('Content-type', \"text/html\")", "self.headers.get_all('Cookie', failobj=[])) if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key,", "and key == 'Accept': pass elif not skip_header_checks: v1 =", "\"text/html\") self.end_headers() self.wfile.write(plain_contents) elif 
self.path == '/sucuri_shit': nonlocal sucuri_reqs_1 sucuri_reqs_1", "\"hai\"}') elif self.path == \"/json/no-coding\": self.send_response(200) self.end_headers() self.wfile.write(b'{\"oh\" : \"hai\"}')", "self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/path-only.txt\": self.send_response(200) self.end_headers()", "self.end_headers() self.wfile.write(b\"<html><body>Root OK?</body></html>\") elif self.path == \"/compressed/deflate\": self.send_response(200) self.send_header('Content-Encoding', 'deflate')", "log = logging.getLogger(\"Main.TestServer\") sucuri_reqs_1 = 0 sucuri_reqs_2 = 0 sucuri_reqs_3", "# print(\"Headers: \", self.headers) # print(\"Cookie(s): \", self.headers.get_all('Cookie', failobj=[])) try:", "Process an HTTP GET request and return a response with", "self.send_response(200) self.send_header('Content-type', \"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") elif self.path == \"/binary_ctnt\": self.send_response(200)", "import WebRequest def capture_expected_headers(expected_headers, test_context, is_chromium=False, is_selenium_garbage_chromium=False, is_annoying_pjs=False, skip_header_checks=False): #", "WebRequest.WebGetRobust() srv = start_server( assertion_class = None, from_wg = wg,", "# print(\"Password\") # print(self.headers) self.send_response(200) self.end_headers() if not 'Authorization' in", "page!</title></head><body>CF Redirected OK?</body></html>\") return container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir,", "self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie', cook['cookie_test_key'].OutputString()) self.end_headers() self.wfile.write(b\"<html><body>CF Cookie Test</body></html>\") elif self.path", "to sucuri_shit (%s)!\" % sucuri_reqs_1) # print(\"Fetch for \", self.path)", "== 'Accept-Language': pass elif (is_annoying_pjs or is_chromium 
or is_selenium_garbage_chromium or", "elif (is_annoying_pjs or is_chromium or is_selenium_garbage_chromium or skip_header_checks) and key", "failobj=[])) try: self.validate_headers() except Exception: self.send_response(500) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Headers", "cook['cloudflare_validate_key']['path'] = \"/\" cook['cloudflare_validate_key']['domain'] = \"\" expiration = datetime.datetime.now() +", "= fp.read() self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) elif self.path ==", "garbage\" self.send_response(503) self.send_header('Server', \"cloudflare is garbage\") self.send_header('Content-type','text/html') self.end_headers() self.wfile.write(plain_contents) elif", "sucuri_reqs_1 sucuri_reqs_1 += 1 if sucuri_reqs_1 > 4: raise RuntimeError(\"Too", "= None, is_selenium_garbage_chromium = False, is_annoying_pjs = False, skip_header_checks =", "= self.headers[key] if v2 is None: v2 = \"\" v2", "'cf_js_challenge_03_12_2018.html') with open(fpath, \"rb\") as fp: plain_contents = fp.read() self.server_version", "== \"/redirect/from-2\": self.send_response(302) self.send_header('uri', \"to-2\") self.end_headers() elif self.path == \"/redirect/to-2\":", "properly!</body></html>\") return self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie is missing</body></html>\") ##################################################################################################################################", "bounce, and fetch). # Doing that requires pulling html content", "and other assorted parameters via their API at all. 
elif", "inb = b\"Root OK?\" cobj = zlib.compressobj(wbits=-zlib.MAX_WBITS) t1 = cobj.compress(inb)", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT\") elif self.path == \"/json/valid\": self.send_response(200)", ": \"hai\"}') elif self.path == \"/json/no-coding\": self.send_response(200) self.end_headers() self.wfile.write(b'{\"oh\" :", "requires pulling html content out of chromium, though. # Annoying.", "self.end_headers() self.wfile.write(b'{\"oh\" : \"hai\"}') elif self.path == \"/json/no-coding\": self.send_response(200) self.end_headers()", "validation ################################################################################################################################## elif self.path == '/sucuri_shit_3': # I'd like to", "isinstance(expected_headers, dict), \"expected_headers must be a dict. Passed a %s\"", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) ################################################################################################################################## # Cloudflare validation ##################################################################################################################################", "BaseHTTPRequestHandler from http.server import HTTPServer from threading import Thread import", "val = self.headers['Authorization'] passval = val.split(\" \")[-1] passstr = base64.b64decode(passval)", "cook_key, cook_value = cook.split(\"=\", 1) if cook_key == 'cloudflare_validate_key' and", "value, self.headers[key], { 'is_annoying_pjs' : is_annoying_pjs, 'is_chromium' : is_chromium, 'is_selenium_garbage_chromium'", "if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target preemptive Sucuri", "in a separate thread. 
# Daemon threads automatically shut down", "s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM) s.bind(('localhost', 0)) address, port = s.getsockname()", "from threading import Thread import WebRequest def capture_expected_headers(expected_headers, test_context, is_chromium=False,", "(is_selenium_garbage_chromium or skip_header_checks) and key == 'Accept-Language': pass elif (is_annoying_pjs", "self.headers) # print(\"Cookie(s): \", self.headers.get_all('Cookie', failobj=[])) try: self.validate_headers() except Exception:", "header, I guess. pass # Selenium is fucking retarded, and", "= cookies.SimpleCookie() cook['cookie_test_key'] = cookie_key cook['cookie_test_key']['path'] = \"/\" cook['cookie_test_key']['domain'] =", "\"/json/no-coding\": self.send_response(200) self.end_headers() self.wfile.write(b'{\"oh\" : \"hai\"}') elif self.path == \"/filename/path-only.txt\":", "self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-1\": self.send_response(200) self.send_header('Content-Disposition', \"filename='lolercoaster.html'\") self.end_headers()", "elif self.path == \"/html-decode\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Root OK?\")", "is_annoying_pjs = is_annoying_pjs, skip_header_checks = skip_header_checks ) retries = 4", "== \"/json/no-coding\": self.send_response(200) self.end_headers() self.wfile.write(b'{\"oh\" : \"hai\"}') elif self.path ==", "failobj=[])[0] cook_key, cook_value = cook.split(\"=\", 1) if cook_key == 'cloudflare_validate_key'", "an HTTP GET request and return a response with an", "self.send_header('Content-Encoding', 'gzip') self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(gzip.compress(b\"Root OK?\")) elif self.path ==", "import uuid import socket import logging import os import base64", "\"/redirect/bad-2\": self.send_response(302) self.send_header('location', 
\"bad-2\") self.end_headers() elif self.path == \"/redirect/bad-3\": self.send_response(302)", "\"text/html\") self.end_headers() self.wfile.write(gzip.compress(b\"Root OK?\")) elif self.path == \"/json/invalid\": self.send_response(200) self.send_header('Content-type',", "\"/redirect/to-1\": self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-1\") elif self.path == \"/redirect/from-2\": self.send_response(302) self.send_header('uri',", "for. # As such, we expect one request only nonlocal", "fp: plain_contents = fp.read() self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) elif", "os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\", 'cf_js_challenge_03_12_2018.html') with open(fpath, \"rb\") as", "I guess. pass # Selenium is fucking retarded, and I", "mock_server_port = get_free_port() expected_headers = dict(from_wg.browserHeaders) print(from_wg) print(expected_headers) assert isinstance(expected_headers,", "= os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\", 'cf_js_challenge_03_12_2018.html') with open(fpath, \"rb\")", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) elif self.path == '/sucuri_shit_2': #", "(is_annoying_pjs or is_selenium_garbage_chromium or skip_header_checks) and key == 'Accept-Encoding': #", "(p2)?</body></html>\") return container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\", 'sucuri_garbage.html')", "# Annoying. 
nonlocal sucuri_reqs_3 sucuri_reqs_3 += 1 if sucuri_reqs_3 >", "request only nonlocal sucuri_reqs_2 sucuri_reqs_2 += 1 if sucuri_reqs_2 >", "self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/redirect/bad-1\": self.send_response(302) self.end_headers() elif self.path", "self.end_headers() self.wfile.write(b\"<html><head><title>I can haz title?</title></head><body>This page has a title!</body></html>\") elif", "self.wfile.write(b\"Headers failed validation!\") raise if self.path == \"/\": self.send_response(200) self.send_header('Content-type',", "fpath = os.path.join(container_dir, \"waf_garbage\", 'cf_js_challenge_03_12_2018.html') with open(fpath, \"rb\") as fp:", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target Sucuri page!</title></head><body>Sucuri Redirected OK?</body></html>\")", "for URL path: '%s'\", self.path) # print(\"Headers: \", self.headers) #", "\"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/redirect/bad-1\": self.send_response(302) self.end_headers() elif", "x >= retries: raise # Start running mock server in", "'/cdn-cgi/l/chk_jschl?jschl_vc=427c2b1cd4fba29608ee81b200e94bfa&pass=<PASSWORD>&jschl_answer=<PASSWORD>': cook = cookies.SimpleCookie() cook['cloudflare_validate_key'] = cookie_key cook['cloudflare_validate_key']['path'] = \"/\"", "%H:%M:%S PST\") self.send_response(200) self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie', cook['cloudflare_validate_key'].OutputString()) self.end_headers() body =", "'gzip') self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(gzip.compress(b\"Root OK?\")) elif self.path == \"/json/invalid\":", "key == 'Accept-Language': pass elif (is_annoying_pjs or is_chromium or is_selenium_garbage_chromium", "elif self.path == \"/redirect/bad-1\": self.send_response(302) self.end_headers() elif self.path == 
\"/redirect/bad-2\":", "\", self.path) # print(\"Cookies:\", self.headers.get_all('Cookie', failobj=[])) if self.headers.get_all('Cookie', failobj=[]): cook", "'{}' -> '{}' ({})\".format( key, value, self.headers[key], { 'is_annoying_pjs' :", "self.path) # print(\"Headers: \", self.headers) # print(\"Cookie(s): \", self.headers.get_all('Cookie', failobj=[]))", "headers # Just ignore that particular header, I guess. pass", "key, val in expected_headers.items(): assert isinstance(key, str) assert isinstance(val, str)", "status. # print(\"Path: \", self.path) # print(\"Headers: \", self.headers) #", "base64.b64decode(passval) if passstr == b'lol:<PASSWORD>': self.wfile.write(b\"Password Ok?\") else: self.wfile.write(b\"Password Bad!\")", "missing</body></html>\") ################################################################################################################################## # Sucuri validation ################################################################################################################################## elif self.path == '/sucuri_shit_3':", "self.path == '/cdn-cgi/l/chk_jschl?jschl_vc=427c2b1cd4fba29608ee81b200e94bfa&pass=<PASSWORD>&jschl_answer=<PASSWORD>': cook = cookies.SimpleCookie() cook['cloudflare_validate_key'] = cookie_key cook['cloudflare_validate_key']['path']", "in expected_headers.items(): assert isinstance(key, str) assert isinstance(val, str) cookie_key =", "self.headers[key], { 'is_annoying_pjs' : is_annoying_pjs, 'is_chromium' : is_chromium, 'is_selenium_garbage_chromium' :", "self.wfile.write(b\"<html><body>Cookie forwarded properly!</body></html>\") return self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie is", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target CF page!</title></head><body>CF Redirected OK?</body></html>\")", "page has no title. 
Sadface.jpg</body></html>\") elif self.path == \"/binary_ctnt\": self.send_response(200)", "= 0 class MockServerRequestHandler(BaseHTTPRequestHandler): def log_message(self, format, *args): return def", "import base64 import zlib import gzip import time import datetime", "elif self.path == \"/content/have-title\": self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head><title>I can haz title?</title></head><body>This", "pass elif (is_annoying_pjs or is_chromium or is_selenium_garbage_chromium or skip_header_checks) and", "cookie_key: self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie forwarded properly!</body></html>\") return self.send_response(200)", "== \"/filename_mime/explicit-html-mime\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif", "import logging import os import base64 import zlib import gzip", "self.end_headers() self.wfile.write(gzip.compress(b\"Root OK?\")) elif self.path == \"/json/invalid\": self.send_response(200) self.send_header('Content-type', \"text/html\")", "if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key, cook_value =", "\"text/html\") self.end_headers() self.wfile.write(b\"Headers failed validation!\") raise if self.path == \"/\":", "== b'lol:<PASSWORD>': self.wfile.write(b\"Password Ok?\") else: self.wfile.write(b\"Password Bad!\") elif self.path ==", "fp.read() self.server_version = \"cloudflare is garbage\" self.send_response(503) self.send_header('Server', \"cloudflare is", "import zlib import gzip import time import datetime from http", "\"filename='lolercoaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-2\": self.send_response(200) 
self.send_header('Content-Disposition', \"filename=\\'lolercoaster.html\\'\")", "unknown path ################################################################################################################################## else: test_context.assertEqual(self.path, \"This shouldn't happen!\") def do_GET(self):", "class MockServerRequestHandler(BaseHTTPRequestHandler): def log_message(self, format, *args): return def validate_headers(self): for", "elif self.path == '/sucuri_shit_2': # This particular path is the", "= True) print(\"running server on port: \", srv) while 1:", "cook_value == cookie_key: # if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers()", "elif self.path == \"/compressed/gzip\": self.send_response(200) self.send_header('Content-Encoding', 'gzip') self.send_header('Content-type', \"text/html\") self.end_headers()", "'/cloudflare_under_attack_shit': if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key, cook_value", ": is_annoying_pjs, 'is_chromium' : is_chromium, 'is_selenium_garbage_chromium' : is_selenium_garbage_chromium, 'skip_header_checks' :", "== \"/redirect/to-1\": self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-1\") elif self.path == \"/redirect/from-2\": self.send_response(302)", "print(\"Capturing expected headers:\") # print(expected_headers) assert isinstance(expected_headers, dict), \"expected_headers must", "+ cobj.flush() self.wfile.write(t1) elif self.path == \"/compressed/gzip\": self.send_response(200) self.send_header('Content-Encoding', 'gzip')", "sucuri_reqs_3) if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key, cook_value", "v2 = \"\" v2 = v2.replace(\" \", \"\") test_context.assertEqual(v1, v2,", "pass # Selenium is fucking retarded, and I can't override", "RuntimeError(\"Too many requests to sucuri_shit (%s)!\" % sucuri_reqs_1) # 
print(\"Fetch", "= fp.read() self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) ################################################################################################################################## # Cloudflare", "v2.replace(\" \", \"\") test_context.assertEqual(v1, v2, msg=\"Mismatch in header parameter '{}'", "fp.read() self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) elif self.path == '/sucuri_shit_2':", "self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\")", "self.send_header('Content-Disposition', \"filename=\\\"loler coaster.html\\\"\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/explicit-html-mime\": self.send_response(200)", "their API at all. 
elif (is_selenium_garbage_chromium or skip_header_checks) and key", "self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/path-only.txt\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif", "else: self.wfile.write(b\"Password Bad!\") elif self.path == \"/content/have-title\": self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head><title>I", "import traceback import uuid import socket import logging import os", "socket import logging import os import base64 import zlib import", "print(expected_headers) assert isinstance(expected_headers, dict) captured_server = capture_expected_headers( expected_headers = expected_headers,", "cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target Sucuri page!</title></head><body>Sucuri Redirected", "\"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target Sucuri page!</title></head><body>Sucuri Redirected OK?</body></html>\") return container_dir", "= self.headers['Authorization'] passval = val.split(\" \")[-1] passstr = base64.b64decode(passval) if", "b'lol:<PASSWORD>': self.wfile.write(b\"Password Ok?\") else: self.wfile.write(b\"Password Bad!\") elif self.path == \"/content/have-title\":", "headers:\") # print(expected_headers) assert isinstance(expected_headers, dict), \"expected_headers must be a", "self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename/content-disposition\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers()", "self.headers.get_all('Cookie', failobj=[])[0] cook_key, cook_value = cook.split(\"=\", 1) if cook_key ==", "self.send_response(302) newurl = \"http://{}:{}\".format(self.server.server_address[0], self.server.server_address[1]) self.send_header('uri', newurl) self.end_headers() elif self.path", 
"self.headers.get_all('Cookie', failobj=[])) try: return self._get_handler() except Exception as e: log.error(\"Exception", "self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-spaces-2\": self.send_response(200) self.send_header('Content-Disposition', \"filename=\\\"loler coaster.html\\\"\")", "OK (p3)?</body></html>\") return container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\",", "cook_key, cook_value = cook.split(\"=\", 1) if cook_key == 'sucuri_cloudproxy_uuid_6293e0004' and", "%s\" & type(expected_headers) for key, val in expected_headers.items(): assert isinstance(key,", "stuff ################################################################################################################################## elif self.path == '/cookie_test': cook = cookies.SimpleCookie() cook['cookie_test_key']", "expiration = datetime.datetime.now() + datetime.timedelta(days=30) cook['cookie_test_key'][\"expires\"] = expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S", "assert isinstance(val, str) cookie_key = <KEY> log = logging.getLogger(\"Main.TestServer\") sucuri_reqs_1", "== '<KEY>': # if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At", "sucuri_reqs_3 sucuri_reqs_3 += 1 if sucuri_reqs_3 > 3: raise RuntimeError(\"Too", "the user-agent # and other assorted parameters via their API", "traceback import uuid import socket import logging import os import", "# So PhantomJS monkeys with accept-encoding headers # Just ignore", "that requires pulling html content out of chromium, though. 
#", "with open(fpath, \"rb\") as fp: plain_contents = fp.read() self.send_response(200) self.send_header('Content-type',", "garbage\") self.send_header('Content-type','text/html') self.end_headers() self.wfile.write(plain_contents) elif self.path == '/cloudflare_under_attack_shit': if self.headers.get_all('Cookie',", "self.path == \"/filename_mime/content-disposition-quotes-1\": self.send_response(200) self.send_header('Content-Disposition', \"filename='lolercoaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path", "a response with an HTTP 200 status. # print(\"Path: \",", "self.end_headers() inb = b\"Root OK?\" cobj = zlib.compressobj(wbits=-zlib.MAX_WBITS) t1 =", "\"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Root OK?</body></html>\") elif self.path == \"/compressed/deflate\": self.send_response(200) self.send_header('Content-Encoding',", "only nonlocal sucuri_reqs_2 sucuri_reqs_2 += 1 if sucuri_reqs_2 > 1:", "\"/filename_mime/explicit-html-mime\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path", "= Thread(target=mock_server.serve_forever) mock_server_thread.setDaemon(True) mock_server_thread.start() return mock_server_port, mock_server, mock_server_thread if __name__", "user-agent # and other assorted parameters via their API at", "Exception: self.send_response(500) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Headers failed validation!\") raise if", "\"text/html\") self.send_header('Set-Cookie', cook['cookie_test_key'].OutputString()) self.end_headers() self.wfile.write(b\"<html><body>CF Cookie Test</body></html>\") elif self.path ==", "is_selenium_garbage_chromium, is_annoying_pjs = is_annoying_pjs, skip_header_checks = skip_header_checks ) retries =", "\"/redirect/from-2\": self.send_response(302) 
self.send_header('uri', \"to-2\") self.end_headers() elif self.path == \"/redirect/to-2\": self.send_response(200)", "\"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/path-only.txt\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\")", "= skip_header_checks ) retries = 4 for x in range(retries", "traceback.format_exc().split(\"\\n\"): log.error(line) raise e return MockServerRequestHandler def get_free_port(): s =", "self.send_response(503) self.send_header('Server', \"cloudflare is garbage\") self.send_header('Content-type','text/html') self.end_headers() self.wfile.write(plain_contents) elif self.path", "== \"/redirect/bad-3\": self.send_response(302) self.send_header('location', \"gopher://www.google.com\") self.end_headers() elif self.path == \"/redirect/from-1\":", "print(self.headers) self.send_response(200) self.end_headers() if not 'Authorization' in self.headers: self.wfile.write(b\"Password not", "= dict(from_wg.browserHeaders) print(from_wg) print(expected_headers) assert isinstance(expected_headers, dict) captured_server = capture_expected_headers(", "fpath = os.path.join(container_dir, \"waf_garbage\", 'sucuri_garbage.html') with open(fpath, \"rb\") as fp:", "start_server( assertion_class = None, from_wg = wg, skip_header_checks = True)", "self.send_header('Server', \"cloudflare is garbage\") self.send_header('Content-type','text/html') self.end_headers() self.wfile.write(plain_contents) elif self.path ==", "skip_header_checks ) retries = 4 for x in range(retries +", "cook['cookie_test_key'][\"expires\"] = expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S PST\") self.send_response(200) self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie',", "import BaseHTTPRequestHandler from http.server import HTTPServer from threading import Thread", "self.end_headers() elif self.path == \"/redirect/bad-3\": self.send_response(302) 
self.send_header('location', \"gopher://www.google.com\") self.end_headers() elif", "is_selenium_garbage_chromium or skip_header_checks) and key == 'Accept-Encoding': # So PhantomJS", "skip_header_checks) and key == 'Accept': pass elif not skip_header_checks: v1", "\"text/html\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path == \"/favicon.ico\": self.send_response(404) self.end_headers()", "'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '04cbb56494ebedbcd19a61b2d728c478': # if cook[''] self.send_response(200) self.send_header('Content-type',", "elif self.path == \"/filename_mime/content-disposition-quotes-spaces-2\": self.send_response(200) self.send_header('Content-Disposition', \"filename=\\\"loler coaster.html\\\"\") self.end_headers() self.wfile.write(b\"LOLWAT?\")", "\"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT\") elif self.path == \"/json/valid\": self.send_response(200) self.send_header('Content-type', \"text/html\")", "\"bad-2\") self.end_headers() elif self.path == \"/redirect/bad-3\": self.send_response(302) self.send_header('location', \"gopher://www.google.com\") self.end_headers()", "self.send_header('Content-type', \"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") elif self.path == \"/binary_ctnt\": self.send_response(200) self.send_header('Content-type',", "target preemptive Sucuri page!</title></head><body>Preemptive waf circumvented OK (p2)?</body></html>\") return container_dir", "plain_contents = fp.read() self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) elif self.path", "is fucking retarded, and I can't override the user-agent #", "return val = self.headers['Authorization'] passval = val.split(\" \")[-1] passstr =", "= \"/\" cook['cookie_test_key']['domain'] = \"\" expiration = datetime.datetime.now() + datetime.timedelta(days=30)", "self.headers.get_all('Cookie', failobj=[])) try: 
self.validate_headers() except Exception: self.send_response(500) self.send_header('Content-type', \"text/html\") self.end_headers()", "self.end_headers() self.wfile.write(b\"<html><head><title>At target preemptive Sucuri page!</title></head><body>Preemptive waf circumvented OK (p3)?</body></html>\")", "def _get_handler(self): # Process an HTTP GET request and return", "# print(\"Cookie(s): \", self.headers.get_all('Cookie', failobj=[])) try: self.validate_headers() except Exception: self.send_response(500)", "== '/cookie_test': cook = cookies.SimpleCookie() cook['cookie_test_key'] = cookie_key cook['cookie_test_key']['path'] =", "t1 = cobj.compress(inb) + cobj.flush() self.wfile.write(t1) elif self.path == \"/compressed/gzip\":", "self.end_headers() self.wfile.write(plain_contents) ################################################################################################################################## # Cloudflare validation ################################################################################################################################## elif self.path ==", "elif self.path == '/cdn-cgi/l/chk_jschl?jschl_vc=427c2b1cd4fba29608ee81b200e94bfa&pass=<PASSWORD>&jschl_answer=<PASSWORD>': cook = cookies.SimpleCookie() cook['cloudflare_validate_key'] = cookie_key", "self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-html-suffix\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.end_headers() self.wfile.write(b\"LOLWAT?\")", "= 0 sucuri_reqs_2 = 0 sucuri_reqs_3 = 0 class MockServerRequestHandler(BaseHTTPRequestHandler):", "'is_selenium_garbage_chromium' : is_selenium_garbage_chromium, 'skip_header_checks' : skip_header_checks, }, ) ) def", "os import base64 import zlib import gzip import time import", "elif self.path == \"/filename_mime/content-disposition-quotes-spaces-1\": self.send_response(200) self.send_header('Content-Disposition', \"filename='loler 
coaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\")", "not skip_header_checks: v1 = value.replace(\" \", \"\") v2 = self.headers[key]", "\"to-1\") self.end_headers() elif self.path == \"/redirect/to-1\": self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-1\") elif", "self.path == \"/\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Root OK?\") elif", "self.path == \"/raw-txt\": self.send_response(200) self.send_header('Content-type', \"text/plain\") self.end_headers() self.wfile.write(b\"Root OK?\") elif", "and cook_value == cookie_key: # if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\")", "or skip_header_checks) and key == 'Accept-Encoding': # So PhantomJS monkeys", "print(\"Cookies:\", self.headers.get_all('Cookie', failobj=[])) if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0]", "== \"/filename_mime/content-disposition-html-suffix\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path ==", "HTTPServer(('0.0.0.0', mock_server_port), captured_server) break except OSError: time.sleep(0.2) if x >=", "import os import base64 import zlib import gzip import time", "and key == 'Accept-Language': pass elif (is_annoying_pjs or is_chromium or", "# and other assorted parameters via their API at all.", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) ################################################################################################################################## # Cloudflare validation ################################################################################################################################## elif", "e: log.error(\"Exception in handler!\") for line in traceback.format_exc().split(\"\\n\"): 
log.error(line) raise", "HTTP 200 status. log.info(\"Request for URL path: '%s'\", self.path) #", "Just ignore that particular header, I guess. pass # Selenium", "\"/password/expect\": # print(\"Password\") # print(self.headers) self.send_response(200) self.end_headers() if not 'Authorization'", "do_GET(self): # Process an HTTP GET request and return a", "self.wfile.write(plain_contents) elif self.path == '/cdn-cgi/l/chk_jschl?jschl_vc=427c2b1cd4fba29608ee81b200e94bfa&pass=<PASSWORD>&jschl_answer=<PASSWORD>': cook = cookies.SimpleCookie() cook['cloudflare_validate_key'] =", "== \"/binary_ctnt\": self.send_response(200) self.send_header('Content-type', \"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") elif self.path ==", "for an unknown path ################################################################################################################################## else: test_context.assertEqual(self.path, \"This shouldn't happen!\")", "'is_chromium' : is_chromium, 'is_selenium_garbage_chromium' : is_selenium_garbage_chromium, 'skip_header_checks' : skip_header_checks, },", "sucuri_reqs_3 = 0 class MockServerRequestHandler(BaseHTTPRequestHandler): def log_message(self, format, *args): return", "target Sucuri page!</title></head><body>Sucuri Redirected OK?</body></html>\") return container_dir = os.path.dirname(__file__) fpath", "\"rb\") as fp: plain_contents = fp.read() self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers()", "fp: plain_contents = fp.read() self.server_version = \"cloudflare is garbage\" self.send_response(503)", "v1 = value.replace(\" \", \"\") v2 = self.headers[key] if v2", "port_override = None, is_chromium = None, is_selenium_garbage_chromium = False, is_annoying_pjs", "elif self.path == \"/filename_mime/content-disposition-quotes-2\": self.send_response(200) self.send_header('Content-Disposition', \"filename=\\'lolercoaster.html\\'\") self.end_headers() 
self.wfile.write(b\"LOLWAT?\") elif", "'Authorization' in self.headers: self.wfile.write(b\"Password not sent!!\") return val = self.headers['Authorization']", "s.getsockname() s.close() return port def start_server(assertion_class, from_wg, port_override = None,", "port = s.getsockname() s.close() return port def start_server(assertion_class, from_wg, port_override", "self.wfile.write(b\"<html><head><title>At target preemptive Sucuri page!</title></head><body>Preemptive waf circumvented OK (p2)?</body></html>\") return", "return a response with an HTTP 200 status. # print(\"Path:", ") retries = 4 for x in range(retries + 1):", "True) print(\"running server on port: \", srv) while 1: time.sleep(1)", "cobj.flush() self.wfile.write(t1) elif self.path == \"/compressed/gzip\": self.send_response(200) self.send_header('Content-Encoding', 'gzip') self.send_header('Content-type',", "Annoying. nonlocal sucuri_reqs_3 sucuri_reqs_3 += 1 if sucuri_reqs_3 > 3:", "retries = 4 for x in range(retries + 1): try:", "= capture_expected_headers( expected_headers = expected_headers, test_context = assertion_class, is_chromium =", "requests to sucuri_shit (%s)!\" % sucuri_reqs_1) # print(\"Fetch for \",", "cook_key == 'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '04cbb56494ebedbcd19a61b2d728c478': # if cook['']", "import gzip import time import datetime from http import cookies", "self.end_headers() elif self.path == \"/redirect/from-1\": self.send_response(302) self.send_header('location', \"to-1\") self.end_headers() elif", "return port def start_server(assertion_class, from_wg, port_override = None, is_chromium =", "self.send_response(302) self.send_header('uri', \"to-2\") self.end_headers() elif self.path == \"/redirect/to-2\": self.send_response(200) self.end_headers()", "= socket.socket(socket.AF_INET, type=socket.SOCK_STREAM) s.bind(('localhost', 0)) address, port = s.getsockname() s.close()", "\"/json/valid\": self.send_response(200) self.send_header('Content-type', 
\"text/html\") self.end_headers() self.wfile.write(b'{\"oh\" : \"hai\"}') elif self.path", "waf circumvented OK (p2)?</body></html>\") return container_dir = os.path.dirname(__file__) fpath =", "= HTTPServer(('0.0.0.0', mock_server_port), captured_server) break except OSError: time.sleep(0.2) if x", "self.send_response(500) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Headers failed validation!\") raise if self.path", "# Handle requests for an unknown path ################################################################################################################################## else: test_context.assertEqual(self.path,", "'sucuri_garbage.html') with open(fpath, \"rb\") as fp: plain_contents = fp.read() self.send_response(200)", "== '__main__': wg = WebRequest.WebGetRobust() srv = start_server( assertion_class =", "down when the main process exits. mock_server_thread = Thread(target=mock_server.serve_forever) mock_server_thread.setDaemon(True)", "\", self.headers) # print(\"Cookie(s): \", self.headers.get_all('Cookie', failobj=[])) try: return self._get_handler()", "cook['cookie_test_key'] = cookie_key cook['cookie_test_key']['path'] = \"/\" cook['cookie_test_key']['domain'] = \"\" expiration", "forwarded properly!</body></html>\") return self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie is missing</body></html>\")", "mock server. 
if port_override: mock_server_port = port_override else: mock_server_port =", "path: '%s'\", self.path) # print(\"Headers: \", self.headers) # print(\"Cookie(s): \",", "elif self.path == \"/json/valid\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b'{\"oh\" :", "title!</body></html>\") elif self.path == \"/content/no-title\": self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head></head><body>This page has", "self.end_headers() elif self.path == \"/redirect/to-2\": self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-2\") elif self.path", "PST\") self.send_response(200) self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie', cook['cookie_test_key'].OutputString()) self.end_headers() self.wfile.write(b\"<html><body>CF Cookie Test</body></html>\")", "GET request and return a response with an HTTP 200", "= \"cloudflare is garbage\" self.send_response(503) self.send_header('Server', \"cloudflare is garbage\") self.send_header('Content-type','text/html')", "HTTP 200 status. 
# print(\"Path: \", self.path) # print(\"Headers: \",", "= cook.split(\"=\", 1) if cook_key == 'cloudflare_validate_key' and cook_value ==", "is_selenium_garbage_chromium or skip_header_checks) and key == 'Accept': pass elif not", "elif self.path == \"/redirect/to-1\": self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-1\") elif self.path ==", "self.wfile.write(plain_contents) elif self.path == '/sucuri_shit': nonlocal sucuri_reqs_1 sucuri_reqs_1 += 1", "in range(retries + 1): try: mock_server = HTTPServer(('0.0.0.0', mock_server_port), captured_server)", "\"/filename_mime/content-disposition-html-suffix\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-1\":", "as fp: plain_contents = fp.read() self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents)", "'cloudflare_validate_key' and cook_value == cookie_key: # if cook[''] self.send_response(200) self.send_header('Content-type',", "(cookie bounce, and fetch). 
# Doing that requires pulling html", "assertion_class, is_chromium = is_chromium, is_selenium_garbage_chromium = is_selenium_garbage_chromium, is_annoying_pjs = is_annoying_pjs,", "range(retries + 1): try: mock_server = HTTPServer(('0.0.0.0', mock_server_port), captured_server) break", "\"/binary_ctnt\": self.send_response(200) self.send_header('Content-type', \"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") elif self.path == \"/binary_ctnt\":", "is_selenium_garbage_chromium = is_selenium_garbage_chromium, is_annoying_pjs = is_annoying_pjs, skip_header_checks = skip_header_checks )", "passval = val.split(\" \")[-1] passstr = base64.b64decode(passval) if passstr ==", "Sucuri validation ################################################################################################################################## elif self.path == '/sucuri_shit_3': # I'd like", "down to just 2 requests (cookie bounce, and fetch). #", "skip_header_checks: v1 = value.replace(\" \", \"\") v2 = self.headers[key] if", "== \"/binary_ctnt\": self.send_response(200) self.send_header('Content-type', \"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") ################################################################################################################################## # Cookie", "== '/sucuri_shit_2': # This particular path is the one we", "'{}' : '{}' -> '{}' ({})\".format( key, value, self.headers[key], {", "self.send_response(200) self.send_header('Content-Disposition', \"filename='lolercoaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-2\": self.send_response(200)", "import datetime from http import cookies from http.server import BaseHTTPRequestHandler", "elif self.path == \"/redirect/bad-3\": self.send_response(302) self.send_header('location', \"gopher://www.google.com\") self.end_headers() elif self.path", ") ) def 
_get_handler(self): # Process an HTTP GET request", "== \"/filename_mime/content-disposition\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path ==", "expected_headers, test_context = assertion_class, is_chromium = is_chromium, is_selenium_garbage_chromium = is_selenium_garbage_chromium,", "chromium, though. # Annoying. nonlocal sucuri_reqs_3 sucuri_reqs_3 += 1 if", "except Exception: self.send_response(500) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Headers failed validation!\") raise", "mock_server_thread.setDaemon(True) mock_server_thread.start() return mock_server_port, mock_server, mock_server_thread if __name__ == '__main__':", "v2 = self.headers[key] if v2 is None: v2 = \"\"", "open(fpath, \"rb\") as fp: plain_contents = fp.read() self.send_response(200) self.send_header('Content-type', \"text/html\")", "cobj.compress(inb) + cobj.flush() self.wfile.write(t1) elif self.path == \"/compressed/gzip\": self.send_response(200) self.send_header('Content-Encoding',", "validation!\") raise if self.path == \"/\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers()", "\"This shouldn't happen!\") def do_GET(self): # Process an HTTP GET", "and return a response with an HTTP 200 status. log.info(\"Request", "logging import os import base64 import zlib import gzip import", "sucuri_shit_3 (%s)!\" % sucuri_reqs_3) if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie',", "################################################################################################################################## # Handle requests for an unknown path ################################################################################################################################## else:", "API at all. 
elif (is_selenium_garbage_chromium or skip_header_checks) and key ==", "PhantomJS monkeys with accept-encoding headers # Just ignore that particular", "\"/binary_ctnt\": self.send_response(200) self.send_header('Content-type', \"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") ################################################################################################################################## # Cookie stuff", "os.path.join(container_dir, \"waf_garbage\", 'cf_js_challenge_03_12_2018.html') with open(fpath, \"rb\") as fp: plain_contents =", "of chromium, though. # Annoying. nonlocal sucuri_reqs_3 sucuri_reqs_3 += 1", "= expected_headers, test_context = assertion_class, is_chromium = is_chromium, is_selenium_garbage_chromium =", "\"gopher://www.google.com\") self.end_headers() elif self.path == \"/redirect/from-1\": self.send_response(302) self.send_header('location', \"to-1\") self.end_headers()", "self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head></head><body>This page has no title. 
Sadface.jpg</body></html>\") elif self.path", "\"/redirect/bad-1\": self.send_response(302) self.end_headers() elif self.path == \"/redirect/bad-2\": self.send_response(302) self.send_header('location', \"bad-2\")", "None, is_chromium = None, is_selenium_garbage_chromium = False, is_annoying_pjs = False,", "sucuri_reqs_2 += 1 if sucuri_reqs_2 > 1: raise RuntimeError(\"Too many", "val.split(\" \")[-1] passstr = base64.b64decode(passval) if passstr == b'lol:<PASSWORD>': self.wfile.write(b\"Password", "\"\") test_context.assertEqual(v1, v2, msg=\"Mismatch in header parameter '{}' : '{}'", "\"to-2\") self.end_headers() elif self.path == \"/redirect/to-2\": self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-2\") elif", "+ 1): try: mock_server = HTTPServer(('0.0.0.0', mock_server_port), captured_server) break except", "is garbage\") self.send_header('Content-type','text/html') self.end_headers() self.wfile.write(plain_contents) elif self.path == '/cloudflare_under_attack_shit': if", "self.send_header('Content-type', \"text/html\") self.end_headers() inb = b\"Root OK?\" cobj = zlib.compressobj(wbits=-zlib.MAX_WBITS)", "\"rb\") as fp: plain_contents = fp.read() self.server_version = \"cloudflare is", "################################################################################################################################## elif self.path == '/sucuri_shit_3': # I'd like to get", "% sucuri_reqs_3) if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key,", "x in range(retries + 1): try: mock_server = HTTPServer(('0.0.0.0', mock_server_port),", "\"filename=lolercoaster.html\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-1\": self.send_response(200) self.send_header('Content-Disposition', \"filename='lolercoaster.html'\")", "skip_header_checks = False ): # Configure mock server. 
if port_override:", "a response with an HTTP 200 status. log.info(\"Request for URL", "== 'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '04cbb56494ebedbcd19a61b2d728c478': # if cook[''] self.send_response(200)", "self.path == \"/redirect/to-1\": self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-1\") elif self.path == \"/redirect/from-2\":", "if cook_key == 'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '<KEY>': # if", "\"/content/have-title\": self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head><title>I can haz title?</title></head><body>This page has a", "mock_server_thread.start() return mock_server_port, mock_server, mock_server_thread if __name__ == '__main__': wg", ": is_chromium, 'is_selenium_garbage_chromium' : is_selenium_garbage_chromium, 'skip_header_checks' : skip_header_checks, }, )", "cook_key, cook_value = cook.split(\"=\", 1) if cook_key == 'cookie_test_key' and", "= cookie_key cook['cloudflare_validate_key']['path'] = \"/\" cook['cloudflare_validate_key']['domain'] = \"\" expiration =", "raise RuntimeError(\"Too many requests to sucuri_shit_2 (%s)!\" % sucuri_reqs_2) if", "sucuri_reqs_1 = 0 sucuri_reqs_2 = 0 sucuri_reqs_3 = 0 class", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) elif self.path == '/sucuri_shit_2': # This", "print(\"Cookie(s): \", self.headers.get_all('Cookie', failobj=[])) try: return self._get_handler() except Exception as", "== \"/html/real\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Root OK?</body></html>\") elif self.path", "\"filename='loler coaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-spaces-2\": self.send_response(200) self.send_header('Content-Disposition',", "cook['cookie_test_key']['path'] = \"/\" cook['cookie_test_key']['domain'] = \"\" 
expiration = datetime.datetime.now() +", "raise RuntimeError(\"Too many requests to sucuri_shit_3 (%s)!\" % sucuri_reqs_3) if", "Handle requests for an unknown path ################################################################################################################################## else: test_context.assertEqual(self.path, \"This", "or skip_header_checks) and key == 'Accept': pass elif not skip_header_checks:", "cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key, cook_value = cook.split(\"=\", 1) if", "self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-html-suffix\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.end_headers()", "== \"/filename_mime/content-disposition-quotes-1\": self.send_response(200) self.send_header('Content-Disposition', \"filename='lolercoaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path ==", "cook['cloudflare_validate_key'] = cookie_key cook['cloudflare_validate_key']['path'] = \"/\" cook['cloudflare_validate_key']['domain'] = \"\" expiration", "\"\") v2 = self.headers[key] if v2 is None: v2 =", "self.send_response(200) self.send_header('Content-Encoding', 'deflate') self.send_header('Content-type', \"text/html\") self.end_headers() inb = b\"Root OK?\"", "<gh_stars>0 import traceback import uuid import socket import logging import", "cook['cloudflare_validate_key'][\"expires\"] = expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S PST\") self.send_response(200) self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie',", "self.wfile.write(b\"Redirect-To-1\") elif self.path == \"/redirect/from-2\": self.send_response(302) self.send_header('uri', \"to-2\") self.end_headers() elif", "cookie_key: # if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target", "\", self.headers) # 
print(\"Cookie(s): \", self.headers.get_all('Cookie', failobj=[])) try: self.validate_headers() except", "== '/sucuri_shit': nonlocal sucuri_reqs_1 sucuri_reqs_1 += 1 if sucuri_reqs_1 >", "False ): # Configure mock server. if port_override: mock_server_port =", "mock_server = HTTPServer(('0.0.0.0', mock_server_port), captured_server) break except OSError: time.sleep(0.2) if", "self.headers['Authorization'] passval = val.split(\" \")[-1] passstr = base64.b64decode(passval) if passstr", "+= 1 if sucuri_reqs_3 > 3: raise RuntimeError(\"Too many requests", "OK?</body></html>\") return container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\", 'cf_js_challenge_03_12_2018.html')", "self.end_headers() elif self.path == \"/raw-txt\": self.send_response(200) self.send_header('Content-type', \"text/plain\") self.end_headers() self.wfile.write(b\"Root", "sucuri_reqs_3 > 3: raise RuntimeError(\"Too many requests to sucuri_shit_3 (%s)!\"", "from http.server import HTTPServer from threading import Thread import WebRequest", "elif self.path == \"/redirect/from-2\": self.send_response(302) self.send_header('uri', \"to-2\") self.end_headers() elif self.path", "failobj=[])[0] cook_key, cook_value = cook.split(\"=\", 1) if cook_key == 'sucuri_cloudproxy_uuid_6293e0004'", "\"waf_garbage\", 'sucuri_garbage.html') with open(fpath, \"rb\") as fp: plain_contents = fp.read()", "'<KEY>': # if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target", "\"/filename_mime/path-only.txt\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition\": self.send_response(200) self.send_header('Content-Disposition',", "target CF page!</title></head><body>CF Redirected OK?</body></html>\") return container_dir = os.path.dirname(__file__) fpath", "\"text/html\") self.end_headers() 
self.wfile.write(b\"<html><head><title>At target preemptive Sucuri page!</title></head><body>Preemptive waf circumvented OK", "expected_headers = expected_headers, test_context = assertion_class, is_chromium = is_chromium, is_selenium_garbage_chromium", "for key, value in expected_headers.items(): if (is_annoying_pjs or is_selenium_garbage_chromium or", "and cook_value == '04cbb56494ebedbcd19a61b2d728c478': # if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\")", "self.path == \"/filename_mime/content-disposition-html-suffix\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path", "self.wfile.write(b\"LOLWAT\") elif self.path == \"/json/valid\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b'{\"oh\"", "# if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target CF", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b'{\"oh\" : \"hai\"}') elif self.path == \"/json/no-coding\":", "== \"/filename/content-disposition\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path ==", "with open(fpath, \"rb\") as fp: plain_contents = fp.read() self.server_version =", "\"/html-decode\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path ==", "OK?\" cobj = zlib.compressobj(wbits=-zlib.MAX_WBITS) t1 = cobj.compress(inb) + cobj.flush() self.wfile.write(t1)", "cobj = zlib.compressobj(wbits=-zlib.MAX_WBITS) t1 = cobj.compress(inb) + cobj.flush() self.wfile.write(t1) elif", "\"/json/invalid\": self.send_response(200) self.send_header('Content-type', \"text/html\") 
self.end_headers() self.wfile.write(b\"LOLWAT\") elif self.path == \"/json/valid\":", "page has a title!</body></html>\") elif self.path == \"/content/no-title\": self.send_response(200) self.end_headers()", "def validate_headers(self): for key, value in expected_headers.items(): if (is_annoying_pjs or", "self.end_headers() self.wfile.write(plain_contents) elif self.path == '/sucuri_shit': nonlocal sucuri_reqs_1 sucuri_reqs_1 +=", "if x >= retries: raise # Start running mock server", "self.end_headers() self.wfile.write(b'{\"oh\" : \"hai\"}') elif self.path == \"/filename/path-only.txt\": self.send_response(200) self.end_headers()", "sucuri_reqs_2 sucuri_reqs_2 += 1 if sucuri_reqs_2 > 1: raise RuntimeError(\"Too", "\"text/html\") self.end_headers() self.wfile.write(plain_contents) ################################################################################################################################## # Cloudflare validation ################################################################################################################################## elif self.path", "= datetime.datetime.now() + datetime.timedelta(days=30) cook['cloudflare_validate_key'][\"expires\"] = expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S PST\")", "== cookie_key: # if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At", "\"/redirect/from-1\": self.send_response(302) self.send_header('location', \"to-1\") self.end_headers() elif self.path == \"/redirect/to-1\": self.send_response(200)", "fetch). 
# Doing that requires pulling html content out of", "'/cookie_test': cook = cookies.SimpleCookie() cook['cookie_test_key'] = cookie_key cook['cookie_test_key']['path'] = \"/\"", "one request only nonlocal sucuri_reqs_2 sucuri_reqs_2 += 1 if sucuri_reqs_2", "== '/cdn-cgi/l/chk_jschl?jschl_vc=427c2b1cd4fba29608ee81b200e94bfa&pass=<PASSWORD>&jschl_answer=<PASSWORD>': cook = cookies.SimpleCookie() cook['cloudflare_validate_key'] = cookie_key cook['cloudflare_validate_key']['path'] =", "'Accept-Language': pass elif (is_annoying_pjs or is_chromium or is_selenium_garbage_chromium or skip_header_checks)", "1) if cook_key == 'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '04cbb56494ebedbcd19a61b2d728c478': #", "== \"/json/valid\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b'{\"oh\" : \"hai\"}') elif", "): # Configure mock server. if port_override: mock_server_port = port_override", "self.path == \"/redirect/to-2\": self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-2\") elif self.path == \"/redirect/from-3\":", "self.path == \"/redirect/bad-1\": self.send_response(302) self.end_headers() elif self.path == \"/redirect/bad-2\": self.send_response(302)", "\"/filename/path-only.txt\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename/path-only-trailing-slash/\": self.send_response(200) self.end_headers()", "# print(expected_headers) assert isinstance(expected_headers, dict), \"expected_headers must be a dict.", "line in traceback.format_exc().split(\"\\n\"): log.error(line) raise e return MockServerRequestHandler def get_free_port():", "start_server(assertion_class, from_wg, port_override = None, is_chromium = None, is_selenium_garbage_chromium =", "fucking retarded, and I can't override the user-agent # and", "self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers() 
self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/path-only.txt\": self.send_response(200)", "separate thread. # Daemon threads automatically shut down when the", "== \"/compressed/gzip\": self.send_response(200) self.send_header('Content-Encoding', 'gzip') self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(gzip.compress(b\"Root OK?\"))", "self.send_header('location', \"bad-2\") self.end_headers() elif self.path == \"/redirect/bad-3\": self.send_response(302) self.send_header('location', \"gopher://www.google.com\")", "e return MockServerRequestHandler def get_free_port(): s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM) s.bind(('localhost',", "elif self.path == \"/html/real\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Root OK?</body></html>\")", "is_annoying_pjs, 'is_chromium' : is_chromium, 'is_selenium_garbage_chromium' : is_selenium_garbage_chromium, 'skip_header_checks' : skip_header_checks,", "already have a cookie for. # As such, we expect", "in handler!\") for line in traceback.format_exc().split(\"\\n\"): log.error(line) raise e return", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/redirect/bad-1\": self.send_response(302) self.end_headers()", "\"\" expiration = datetime.datetime.now() + datetime.timedelta(days=30) cook['cloudflare_validate_key'][\"expires\"] = expiration.strftime(\"%a, %d-%b-%Y", "failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key, cook_value = cook.split(\"=\", 1)", "pulling html content out of chromium, though. # Annoying. 
nonlocal", "as fp: plain_contents = fp.read() self.server_version = \"cloudflare is garbage\"", "or skip_header_checks) and key == 'Accept-Language': pass elif (is_annoying_pjs or", "self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path == \"/favicon.ico\": self.send_response(404) self.end_headers() elif", "elif self.path == '/cloudflare_under_attack_shit': if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie',", "process exits. mock_server_thread = Thread(target=mock_server.serve_forever) mock_server_thread.setDaemon(True) mock_server_thread.start() return mock_server_port, mock_server,", "OK?\")) elif self.path == \"/json/invalid\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT\")", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path == \"/favicon.ico\": self.send_response(404)", "self.path == '/cloudflare_under_attack_shit_2': if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0]", "is_chromium or is_selenium_garbage_chromium or skip_header_checks) and key == 'Accept': pass", "= <KEY> log = logging.getLogger(\"Main.TestServer\") sucuri_reqs_1 = 0 sucuri_reqs_2 =", "Passed a %s\" & type(expected_headers) for key, val in expected_headers.items():", "if (is_annoying_pjs or is_selenium_garbage_chromium or skip_header_checks) and key == 'Accept-Encoding':", "= cookie_key cook['cookie_test_key']['path'] = \"/\" cook['cookie_test_key']['domain'] = \"\" expiration =", "elif self.path == \"/filename_mime/content-disposition-html-suffix\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif", "Cookie stuff ################################################################################################################################## elif self.path == 
'/cookie_test': cook = cookies.SimpleCookie()", "# Process an HTTP GET request and return a response", "= get_free_port() expected_headers = dict(from_wg.browserHeaders) print(from_wg) print(expected_headers) assert isinstance(expected_headers, dict)", "many requests to sucuri_shit_2 (%s)!\" % sucuri_reqs_2) if self.headers.get_all('Cookie', failobj=[]):", "'skip_header_checks' : skip_header_checks, }, ) ) def _get_handler(self): # Process", "= zlib.compressobj(wbits=-zlib.MAX_WBITS) t1 = cobj.compress(inb) + cobj.flush() self.wfile.write(t1) elif self.path", "or is_chromium or is_selenium_garbage_chromium or skip_header_checks) and key == 'Accept':", "if v2 is None: v2 = \"\" v2 = v2.replace(\"", "self.send_header('Content-type', \"text/plain\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path == \"/html-decode\": self.send_response(200)", "elif self.path == \"/filename_mime/content-disposition\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif", "requests to sucuri_shit_2 (%s)!\" % sucuri_reqs_2) if self.headers.get_all('Cookie', failobj=[]): cook", "def log_message(self, format, *args): return def validate_headers(self): for key, value", "This particular path is the one we should already have", "self.wfile.write(b\"<html><body>Root OK?</body></html>\") elif self.path == \"/compressed/deflate\": self.send_response(200) self.send_header('Content-Encoding', 'deflate') self.send_header('Content-type',", "== '/cloudflare_under_attack_shit': if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key,", "*args): return def validate_headers(self): for key, value in expected_headers.items(): if", "validation ################################################################################################################################## elif self.path == '/cloudflare_under_attack_shit_2': if 
self.headers.get_all('Cookie', failobj=[]): cook", "== \"/redirect/bad-2\": self.send_response(302) self.send_header('location', \"bad-2\") self.end_headers() elif self.path == \"/redirect/bad-3\":", "self.send_header('Set-Cookie', cook['cloudflare_validate_key'].OutputString()) self.end_headers() body = \"<html><body>Setting cookies.<script>window.location.href='/cloudflare_under_attack_shit'</script></body></html>\" self.wfile.write(body.encode(\"utf-8\")) ################################################################################################################################## #", "OK?</body></html>\") elif self.path == \"/compressed/deflate\": self.send_response(200) self.send_header('Content-Encoding', 'deflate') self.send_header('Content-type', \"text/html\")", "and key == 'Accept-Encoding': # So PhantomJS monkeys with accept-encoding", "self.path == \"/filename_mime/path-only.txt\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition\":", "self.wfile.write(body.encode(\"utf-8\")) ################################################################################################################################## # Handle requests for an unknown path ##################################################################################################################################", "\"\" v2 = v2.replace(\" \", \"\") test_context.assertEqual(v1, v2, msg=\"Mismatch in", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path == \"/favicon.ico\":", "an HTTP 200 status. 
# print(\"Path: \", self.path) # print(\"Headers:", "has a title!</body></html>\") elif self.path == \"/content/no-title\": self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head></head><body>This", "elif self.path == \"/json/no-coding\": self.send_response(200) self.end_headers() self.wfile.write(b'{\"oh\" : \"hai\"}') elif", "= False, skip_header_checks = False ): # Configure mock server.", "port_override else: mock_server_port = get_free_port() expected_headers = dict(from_wg.browserHeaders) print(from_wg) print(expected_headers)", "Redirected OK?</body></html>\") return container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\",", "self.path == \"/filename_mime/explicit-html-mime\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT?\")", "cook_value = cook.split(\"=\", 1) if cook_key == 'sucuri_cloudproxy_uuid_6293e0004' and cook_value", "html content out of chromium, though. # Annoying. 
nonlocal sucuri_reqs_3", "path ################################################################################################################################## else: test_context.assertEqual(self.path, \"This shouldn't happen!\") def do_GET(self): #", "'Accept': pass elif not skip_header_checks: v1 = value.replace(\" \", \"\")", "failobj=[])) if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key, cook_value", "dict) captured_server = capture_expected_headers( expected_headers = expected_headers, test_context = assertion_class,", "'cookie_test_key' and cook_value == cookie_key: self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie", "1): try: mock_server = HTTPServer(('0.0.0.0', mock_server_port), captured_server) break except OSError:", "\"/filename_mime/content-disposition\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-html-suffix\":", "raise # Start running mock server in a separate thread.", "zlib.compressobj(wbits=-zlib.MAX_WBITS) t1 = cobj.compress(inb) + cobj.flush() self.wfile.write(t1) elif self.path ==", "self.wfile.write(b\"<html><head><title>I can haz title?</title></head><body>This page has a title!</body></html>\") elif self.path", "import cookies from http.server import BaseHTTPRequestHandler from http.server import HTTPServer", "socket.socket(socket.AF_INET, type=socket.SOCK_STREAM) s.bind(('localhost', 0)) address, port = s.getsockname() s.close() return", "MockServerRequestHandler def get_free_port(): s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM) s.bind(('localhost', 0)) address,", "== \"/compressed/deflate\": self.send_response(200) self.send_header('Content-Encoding', 'deflate') self.send_header('Content-type', \"text/html\") self.end_headers() inb 
=", "WebRequest def capture_expected_headers(expected_headers, test_context, is_chromium=False, is_selenium_garbage_chromium=False, is_annoying_pjs=False, skip_header_checks=False): # print(\"Capturing", "# Cookie stuff ################################################################################################################################## elif self.path == '/cookie_test': cook =", "have a cookie for. # As such, we expect one", "HTTP GET request and return a response with an HTTP", "0)) address, port = s.getsockname() s.close() return port def start_server(assertion_class,", "if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target Sucuri page!</title></head><body>Sucuri", "self.send_response(302) self.send_header('location', \"to-1\") self.end_headers() elif self.path == \"/redirect/to-1\": self.send_response(200) self.end_headers()", "return container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\", 'cf_js_challenge_03_12_2018.html') with", "0 class MockServerRequestHandler(BaseHTTPRequestHandler): def log_message(self, format, *args): return def validate_headers(self):", "assert isinstance(expected_headers, dict), \"expected_headers must be a dict. 
Passed a", "self.end_headers() self.wfile.write(b\"<html><head><title>At target preemptive Sucuri page!</title></head><body>Preemptive waf circumvented OK (p2)?</body></html>\")", "None: v2 = \"\" v2 = v2.replace(\" \", \"\") test_context.assertEqual(v1,", "\"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie forwarded properly!</body></html>\") return self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers()", "self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename/content-disposition\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\")", "+= 1 if sucuri_reqs_2 > 1: raise RuntimeError(\"Too many requests", "zlib import gzip import time import datetime from http import", "self.wfile.write(b\"<html><head><title>At target CF page!</title></head><body>CF Redirected OK?</body></html>\") return container_dir = os.path.dirname(__file__)", "elif self.path == '/cookie_require': if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie',", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Root OK?</body></html>\") elif self.path == \"/compressed/deflate\":", ") def _get_handler(self): # Process an HTTP GET request and", "type=socket.SOCK_STREAM) s.bind(('localhost', 0)) address, port = s.getsockname() s.close() return port", "self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") ################################################################################################################################## # Cookie stuff ################################################################################################################################## elif self.path == '/cookie_test':", "or is_selenium_garbage_chromium or skip_header_checks) and key == 'Accept': pass elif", "self.path == \"/favicon.ico\": self.send_response(404) 
self.end_headers() elif self.path == \"/raw-txt\": self.send_response(200)", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie is missing</body></html>\") ################################################################################################################################## # Sucuri", "just 2 requests (cookie bounce, and fetch). # Doing that", "for x in range(retries + 1): try: mock_server = HTTPServer(('0.0.0.0',", "log.error(line) raise e return MockServerRequestHandler def get_free_port(): s = socket.socket(socket.AF_INET,", "self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename/content-disposition\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\")", "OK?\") elif self.path == \"/favicon.ico\": self.send_response(404) self.end_headers() elif self.path ==", "self.send_response(200) self.end_headers() self.wfile.write(b'{\"oh\" : \"hai\"}') elif self.path == \"/filename/path-only.txt\": self.send_response(200)", "many requests to sucuri_shit (%s)!\" % sucuri_reqs_1) # print(\"Fetch for", "port def start_server(assertion_class, from_wg, port_override = None, is_chromium = None,", "for key, val in expected_headers.items(): assert isinstance(key, str) assert isinstance(val,", "happen!\") def do_GET(self): # Process an HTTP GET request and", "like to get this down to just 2 requests (cookie", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie', cook['cloudflare_validate_key'].OutputString()) self.end_headers() body = \"<html><body>Setting cookies.<script>window.location.href='/cloudflare_under_attack_shit'</script></body></html>\"", "Thread(target=mock_server.serve_forever) mock_server_thread.setDaemon(True) mock_server_thread.start() return mock_server_port, mock_server, mock_server_thread if __name__ ==", "from_wg, 
port_override = None, is_chromium = None, is_selenium_garbage_chromium = False,", "self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers()", "is_selenium_garbage_chromium = False, is_annoying_pjs = False, skip_header_checks = False ):", "newurl = \"http://{}:{}\".format(self.server.server_address[0], self.server.server_address[1]) self.send_header('uri', newurl) self.end_headers() elif self.path ==", "Ok?\") else: self.wfile.write(b\"Password Bad!\") elif self.path == \"/content/have-title\": self.send_response(200) self.end_headers()", "capture_expected_headers( expected_headers = expected_headers, test_context = assertion_class, is_chromium = is_chromium,", "= False, is_annoying_pjs = False, skip_header_checks = False ): #", "\"/html/real\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Root OK?</body></html>\") elif self.path ==", "%d-%b-%Y %H:%M:%S PST\") self.send_response(200) self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie', cook['cloudflare_validate_key'].OutputString()) self.end_headers() body", "cookie_key cook['cloudflare_validate_key']['path'] = \"/\" cook['cloudflare_validate_key']['domain'] = \"\" expiration = datetime.datetime.now()", "for \", self.path) # print(\"Cookies:\", self.headers.get_all('Cookie', failobj=[])) if self.headers.get_all('Cookie', failobj=[]):", "or is_selenium_garbage_chromium or skip_header_checks) and key == 'Accept-Encoding': # So", "# Start running mock server in a separate thread. #", "get this down to just 2 requests (cookie bounce, and", "self.path == '/sucuri_shit_2': # This particular path is the one", "# Doing that requires pulling html content out of chromium,", "an HTTP 200 status. 
log.info(\"Request for URL path: '%s'\", self.path)", "== \"/json/invalid\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT\") elif self.path ==", "# Sucuri validation ################################################################################################################################## elif self.path == '/sucuri_shit_3': # I'd", "\"text/plain\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path == \"/html-decode\": self.send_response(200) self.send_header('Content-type',", "% sucuri_reqs_2) if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key,", "cookies from http.server import BaseHTTPRequestHandler from http.server import HTTPServer from", "self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") ################################################################################################################################## # Cookie stuff ################################################################################################################################## elif self.path ==", "% sucuri_reqs_1) # print(\"Fetch for \", self.path) # print(\"Cookies:\", self.headers.get_all('Cookie',", "if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target CF page!</title></head><body>CF", "mock_server_port, mock_server, mock_server_thread if __name__ == '__main__': wg = WebRequest.WebGetRobust()", "& type(expected_headers) for key, val in expected_headers.items(): assert isinstance(key, str)", "################################################################################################################################## # Sucuri validation ################################################################################################################################## elif self.path == '/sucuri_shit_3': #", 
"http import cookies from http.server import BaseHTTPRequestHandler from http.server import", "expected_headers.items(): assert isinstance(key, str) assert isinstance(val, str) cookie_key = <KEY>", "is garbage\") self.send_header('Content-type','text/html') self.end_headers() self.wfile.write(plain_contents) elif self.path == '/cdn-cgi/l/chk_jschl?jschl_vc=427c2b1cd4fba29608ee81b200e94bfa&pass=<PASSWORD>&jschl_answer=<PASSWORD>': cook", "return mock_server_port, mock_server, mock_server_thread if __name__ == '__main__': wg =", "retarded, and I can't override the user-agent # and other", "v2, msg=\"Mismatch in header parameter '{}' : '{}' -> '{}'", "self.send_response(200) self.send_header('Content-Disposition', \"filename='loler coaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-spaces-2\":", "circumvented OK (p3)?</body></html>\") return container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir,", "OK?\") elif self.path == \"/html/real\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Root", "os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\", 'sucuri_garbage.html') with open(fpath, \"rb\") as", "self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-spaces-1\": self.send_response(200) self.send_header('Content-Disposition', \"filename='loler coaster.html'\") self.end_headers()", "self.wfile.write(b\"Root OK?\") elif self.path == \"/favicon.ico\": self.send_response(404) self.end_headers() elif self.path", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target preemptive Sucuri page!</title></head><body>Preemptive waf circumvented", "cook_value == '<KEY>': # if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers()", 
"sucuri_reqs_2) if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key, cook_value", "> 3: raise RuntimeError(\"Too many requests to sucuri_shit_3 (%s)!\" %", "= \"http://{}:{}\".format(self.server.server_address[0], self.server.server_address[1]) self.send_header('uri', newurl) self.end_headers() elif self.path == \"/password/expect\":", "= wg, skip_header_checks = True) print(\"running server on port: \",", "is_chromium = is_chromium, is_selenium_garbage_chromium = is_selenium_garbage_chromium, is_annoying_pjs = is_annoying_pjs, skip_header_checks", "self.path == \"/redirect/bad-2\": self.send_response(302) self.send_header('location', \"bad-2\") self.end_headers() elif self.path ==", "\"text/html\") self.end_headers() inb = b\"Root OK?\" cobj = zlib.compressobj(wbits=-zlib.MAX_WBITS) t1", "0 sucuri_reqs_3 = 0 class MockServerRequestHandler(BaseHTTPRequestHandler): def log_message(self, format, *args):", "dict. Passed a %s\" & type(expected_headers) for key, val in", "self.path == '/sucuri_shit': nonlocal sucuri_reqs_1 sucuri_reqs_1 += 1 if sucuri_reqs_1", "expected_headers.items(): if (is_annoying_pjs or is_selenium_garbage_chromium or skip_header_checks) and key ==", "== 'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '<KEY>': # if cook[''] self.send_response(200)", "self.end_headers() self.wfile.write(b\"Redirect-To-2\") elif self.path == \"/redirect/from-3\": self.send_response(302) newurl = \"http://{}:{}\".format(self.server.server_address[0],", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie', cook['cookie_test_key'].OutputString()) self.end_headers() self.wfile.write(b\"<html><body>CF Cookie Test</body></html>\") elif", "= value.replace(\" \", \"\") v2 = self.headers[key] if v2 is", "# if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target Sucuri", 
"################################################################################################################################## elif self.path == '/cloudflare_under_attack_shit_2': if self.headers.get_all('Cookie', failobj=[]): cook =", "= False ): # Configure mock server. if port_override: mock_server_port", "self.wfile.write(t1) elif self.path == \"/compressed/gzip\": self.send_response(200) self.send_header('Content-Encoding', 'gzip') self.send_header('Content-type', \"text/html\")", "sucuri_reqs_1) # print(\"Fetch for \", self.path) # print(\"Cookies:\", self.headers.get_all('Cookie', failobj=[]))", "self.wfile.write(b\"Password not sent!!\") return val = self.headers['Authorization'] passval = val.split(\"", "self.path == \"/redirect/from-3\": self.send_response(302) newurl = \"http://{}:{}\".format(self.server.server_address[0], self.server.server_address[1]) self.send_header('uri', newurl)", "cook.split(\"=\", 1) if cook_key == 'cookie_test_key' and cook_value == cookie_key:", "log.info(\"Request for URL path: '%s'\", self.path) # print(\"Headers: \", self.headers)", "dict(from_wg.browserHeaders) print(from_wg) print(expected_headers) assert isinstance(expected_headers, dict) captured_server = capture_expected_headers( expected_headers", "Sucuri page!</title></head><body>Preemptive waf circumvented OK (p2)?</body></html>\") return container_dir = os.path.dirname(__file__)", "print(expected_headers) assert isinstance(expected_headers, dict), \"expected_headers must be a dict. Passed", "cook.split(\"=\", 1) if cook_key == 'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '<KEY>':", "self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-2\": self.send_response(200) self.send_header('Content-Disposition', \"filename=\\'lolercoaster.html\\'\") self.end_headers()", "cookie for. 
# As such, we expect one request only", "Doing that requires pulling html content out of chromium, though.", "datetime.timedelta(days=30) cook['cloudflare_validate_key'][\"expires\"] = expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S PST\") self.send_response(200) self.send_header('Content-type', \"text/html\")", "self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/path-only.txt\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path", "# print(\"Cookies:\", self.headers.get_all('Cookie', failobj=[])) if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie',", "= None, is_chromium = None, is_selenium_garbage_chromium = False, is_annoying_pjs =", "# print(\"Capturing expected headers:\") # print(expected_headers) assert isinstance(expected_headers, dict), \"expected_headers", "skip_header_checks) and key == 'Accept-Language': pass elif (is_annoying_pjs or is_chromium", "is None: v2 = \"\" v2 = v2.replace(\" \", \"\")", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie is missing</body></html>\") ################################################################################################################################## # Sucuri validation", "from http import cookies from http.server import BaseHTTPRequestHandler from http.server", "self.send_response(404) self.end_headers() elif self.path == \"/raw-txt\": self.send_response(200) self.send_header('Content-type', \"text/plain\") self.end_headers()", "self.wfile.write(b\"Password Ok?\") else: self.wfile.write(b\"Password Bad!\") elif self.path == \"/content/have-title\": self.send_response(200)", "isinstance(expected_headers, dict) captured_server = capture_expected_headers( expected_headers = expected_headers, test_context =", "({})\".format( key, value, self.headers[key], { 'is_annoying_pjs' : is_annoying_pjs, 'is_chromium' :", "== '04cbb56494ebedbcd19a61b2d728c478': # if cook[''] 
self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At", "running mock server in a separate thread. # Daemon threads", "with an HTTP 200 status. log.info(\"Request for URL path: '%s'\",", "elif self.path == \"/content/no-title\": self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head></head><body>This page has no", "elif not skip_header_checks: v1 = value.replace(\" \", \"\") v2 =", "elif (is_selenium_garbage_chromium or skip_header_checks) and key == 'Accept-Language': pass elif", "+ datetime.timedelta(days=30) cook['cloudflare_validate_key'][\"expires\"] = expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S PST\") self.send_response(200) self.send_header('Content-type',", "fp: plain_contents = fp.read() self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) ##################################################################################################################################", "\"text/html\") self.end_headers() self.wfile.write(plain_contents) elif self.path == '/sucuri_shit_2': # This particular", "test_context, is_chromium=False, is_selenium_garbage_chromium=False, is_annoying_pjs=False, skip_header_checks=False): # print(\"Capturing expected headers:\") #", "print(\"Headers: \", self.headers) # print(\"Cookie(s): \", self.headers.get_all('Cookie', failobj=[])) try: return", "self.end_headers() elif self.path == \"/redirect/bad-2\": self.send_response(302) self.send_header('location', \"bad-2\") self.end_headers() elif", "elif self.path == \"/compressed/deflate\": self.send_response(200) self.send_header('Content-Encoding', 'deflate') self.send_header('Content-type', \"text/html\") self.end_headers()", "self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-spaces-1\": self.send_response(200) self.send_header('Content-Disposition', 
\"filename='loler coaster.html'\")", "capture_expected_headers(expected_headers, test_context, is_chromium=False, is_selenium_garbage_chromium=False, is_annoying_pjs=False, skip_header_checks=False): # print(\"Capturing expected headers:\")", "'Accept-Encoding': # So PhantomJS monkeys with accept-encoding headers # Just", "= None, from_wg = wg, skip_header_checks = True) print(\"running server", "wg, skip_header_checks = True) print(\"running server on port: \", srv)", "server. if port_override: mock_server_port = port_override else: mock_server_port = get_free_port()", "1) if cook_key == 'cloudflare_validate_key' and cook_value == cookie_key: #", "validate_headers(self): for key, value in expected_headers.items(): if (is_annoying_pjs or is_selenium_garbage_chromium", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target preemptive Sucuri page!</title></head><body>Preemptive waf", "logging.getLogger(\"Main.TestServer\") sucuri_reqs_1 = 0 sucuri_reqs_2 = 0 sucuri_reqs_3 = 0", "self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") elif self.path == \"/binary_ctnt\": self.send_response(200) self.send_header('Content-type', \"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\")", "'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '<KEY>': # if cook[''] self.send_response(200) self.send_header('Content-type',", "\"/raw-txt\": self.send_response(200) self.send_header('Content-type', \"text/plain\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path ==", "== \"/redirect/bad-1\": self.send_response(302) self.end_headers() elif self.path == \"/redirect/bad-2\": self.send_response(302) self.send_header('location',", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target CF page!</title></head><body>CF Redirected OK?</body></html>\") return", "self.path == \"/content/no-title\": 
self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head></head><body>This page has no title.", "cook_value = cook.split(\"=\", 1) if cook_key == 'cookie_test_key' and cook_value", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(gzip.compress(b\"Root OK?\")) elif self.path == \"/json/invalid\": self.send_response(200)", "a title!</body></html>\") elif self.path == \"/content/no-title\": self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head></head><body>This page", "self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-1\": self.send_response(200) self.send_header('Content-Disposition',", "raise if self.path == \"/\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Root", "ignore that particular header, I guess. pass # Selenium is", "str) cookie_key = <KEY> log = logging.getLogger(\"Main.TestServer\") sucuri_reqs_1 = 0", "self.send_header('Content-Encoding', 'deflate') self.send_header('Content-type', \"text/html\") self.end_headers() inb = b\"Root OK?\" cobj", "other assorted parameters via their API at all. elif (is_selenium_garbage_chromium", "all. 
elif (is_selenium_garbage_chromium or skip_header_checks) and key == 'Accept-Language': pass", "to sucuri_shit_2 (%s)!\" % sucuri_reqs_2) if self.headers.get_all('Cookie', failobj=[]): cook =", "if cook_key == 'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '04cbb56494ebedbcd19a61b2d728c478': # if", "self.path == \"/compressed/deflate\": self.send_response(200) self.send_header('Content-Encoding', 'deflate') self.send_header('Content-type', \"text/html\") self.end_headers() inb", "== \"/filename/path-only.txt\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename/path-only-trailing-slash/\": self.send_response(200)", "expiration = datetime.datetime.now() + datetime.timedelta(days=30) cook['cloudflare_validate_key'][\"expires\"] = expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S", "except OSError: time.sleep(0.2) if x >= retries: raise # Start", "elif self.path == '/sucuri_shit_3': # I'd like to get this", "return self._get_handler() except Exception as e: log.error(\"Exception in handler!\") for", "gzip import time import datetime from http import cookies from", "self._get_handler() except Exception as e: log.error(\"Exception in handler!\") for line", "server in a separate thread. # Daemon threads automatically shut", "mock_server_thread if __name__ == '__main__': wg = WebRequest.WebGetRobust() srv =", "should already have a cookie for. 
# As such, we", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) elif self.path == '/sucuri_shit': nonlocal sucuri_reqs_1", "\"filename=lolercoaster.html\") self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/redirect/bad-1\": self.send_response(302)", "self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-html-suffix\": self.send_response(200) self.send_header('Content-Disposition',", "self.path == \"/filename/path-only.txt\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename/path-only-trailing-slash/\":", "self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename/path-only-trailing-slash/\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\")", "self.headers: self.wfile.write(b\"Password not sent!!\") return val = self.headers['Authorization'] passval =", "haz title?</title></head><body>This page has a title!</body></html>\") elif self.path == \"/content/no-title\":", "200 status. 
# print(\"Path: \", self.path) # print(\"Headers: \", self.headers)", "= s.getsockname() s.close() return port def start_server(assertion_class, from_wg, port_override =", "self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-2\": self.send_response(200) self.send_header('Content-Disposition', \"filename=\\'lolercoaster.html\\'\") self.end_headers() self.wfile.write(b\"LOLWAT?\")", "is_chromium=False, is_selenium_garbage_chromium=False, is_annoying_pjs=False, skip_header_checks=False): # print(\"Capturing expected headers:\") # print(expected_headers)", "return container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\", 'sucuri_garbage.html') with", "(%s)!\" % sucuri_reqs_1) # print(\"Fetch for \", self.path) # print(\"Cookies:\",", "self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-1\": self.send_response(200) self.send_header('Content-Disposition', \"filename='lolercoaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\")", "title?</title></head><body>This page has a title!</body></html>\") elif self.path == \"/content/no-title\": self.send_response(200)", "requests to sucuri_shit_3 (%s)!\" % sucuri_reqs_3) if self.headers.get_all('Cookie', failobj=[]): cook", "datetime from http import cookies from http.server import BaseHTTPRequestHandler from", "s.bind(('localhost', 0)) address, port = s.getsockname() s.close() return port def", "passstr = base64.b64decode(passval) if passstr == b'lol:<PASSWORD>': self.wfile.write(b\"Password Ok?\") else:", "__name__ == '__main__': wg = WebRequest.WebGetRobust() srv = start_server( assertion_class", "via their API at all. 
elif (is_selenium_garbage_chromium or skip_header_checks) and", "self.send_header('uri', newurl) self.end_headers() elif self.path == \"/password/expect\": # print(\"Password\") #", "self.wfile.write(b\"Root OK?\") elif self.path == \"/html-decode\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers()", "# Daemon threads automatically shut down when the main process", "self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") elif self.path == \"/binary_ctnt\": self.send_response(200) self.send_header('Content-type', \"image/jpeg\") self.end_headers()", "OSError: time.sleep(0.2) if x >= retries: raise # Start running", "= expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S PST\") self.send_response(200) self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie', cook['cloudflare_validate_key'].OutputString())", "is_chromium, 'is_selenium_garbage_chromium' : is_selenium_garbage_chromium, 'skip_header_checks' : skip_header_checks, }, ) )", "threading import Thread import WebRequest def capture_expected_headers(expected_headers, test_context, is_chromium=False, is_selenium_garbage_chromium=False,", "self.wfile.write(b\"<html><body>Cookie is missing</body></html>\") ################################################################################################################################## # Sucuri validation ################################################################################################################################## elif self.path", "expected_headers = dict(from_wg.browserHeaders) print(from_wg) print(expected_headers) assert isinstance(expected_headers, dict) captured_server =", "if cook_key == 'cookie_test_key' and cook_value == cookie_key: self.send_response(200) self.send_header('Content-type',", "passstr == b'lol:<PASSWORD>': self.wfile.write(b\"Password Ok?\") else: self.wfile.write(b\"Password Bad!\") elif self.path", "self.path == 
\"/filename_mime/content-disposition-quotes-spaces-2\": self.send_response(200) self.send_header('Content-Disposition', \"filename=\\\"loler coaster.html\\\"\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif", "self.end_headers() self.wfile.write(plain_contents) elif self.path == '/sucuri_shit_2': # This particular path", "self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\")", "cook = cookies.SimpleCookie() cook['cloudflare_validate_key'] = cookie_key cook['cloudflare_validate_key']['path'] = \"/\" cook['cloudflare_validate_key']['domain']", "if passstr == b'lol:<PASSWORD>': self.wfile.write(b\"Password Ok?\") else: self.wfile.write(b\"Password Bad!\") elif", "self.send_response(302) self.send_header('location', \"gopher://www.google.com\") self.end_headers() elif self.path == \"/redirect/from-1\": self.send_response(302) self.send_header('location',", "Cloudflare validation ################################################################################################################################## elif self.path == '/cloudflare_under_attack_shit_2': if self.headers.get_all('Cookie', failobj=[]):", "is_annoying_pjs=False, skip_header_checks=False): # print(\"Capturing expected headers:\") # print(expected_headers) assert isinstance(expected_headers,", "test_context.assertEqual(v1, v2, msg=\"Mismatch in header parameter '{}' : '{}' ->", "threads automatically shut down when the main process exits. 
mock_server_thread", "cook_value == cookie_key: self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie forwarded properly!</body></html>\")", "expect one request only nonlocal sucuri_reqs_2 sucuri_reqs_2 += 1 if", "self.end_headers() self.wfile.write(plain_contents) elif self.path == '/cdn-cgi/l/chk_jschl?jschl_vc=427c2b1cd4fba29608ee81b200e94bfa&pass=<PASSWORD>&jschl_answer=<PASSWORD>': cook = cookies.SimpleCookie() cook['cloudflare_validate_key']", "OK?</body></html>\") return container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\", 'sucuri_garbage.html')", "self.path == \"/binary_ctnt\": self.send_response(200) self.send_header('Content-type', \"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") elif self.path", "== \"/favicon.ico\": self.send_response(404) self.end_headers() elif self.path == \"/raw-txt\": self.send_response(200) self.send_header('Content-type',", "4: raise RuntimeError(\"Too many requests to sucuri_shit (%s)!\" % sucuri_reqs_1)", "HTTPServer from threading import Thread import WebRequest def capture_expected_headers(expected_headers, test_context,", "1 if sucuri_reqs_3 > 3: raise RuntimeError(\"Too many requests to", "<KEY> log = logging.getLogger(\"Main.TestServer\") sucuri_reqs_1 = 0 sucuri_reqs_2 = 0", "\"/filename/content-disposition\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/path-only.txt\":", "sent!!\") return val = self.headers['Authorization'] passval = val.split(\" \")[-1] passstr", "elif self.path == \"/redirect/bad-2\": self.send_response(302) self.send_header('location', \"bad-2\") self.end_headers() elif self.path", "################################################################################################################################## elif 
self.path == '/cookie_test': cook = cookies.SimpleCookie() cook['cookie_test_key'] =", "fp.read() self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) elif self.path == '/sucuri_shit':", "self.path == \"/html-decode\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Root OK?\") elif", "self.wfile.write(plain_contents) elif self.path == '/cloudflare_under_attack_shit': if self.headers.get_all('Cookie', failobj=[]): cook =", "try: self.validate_headers() except Exception: self.send_response(500) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Headers failed", "\"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target CF page!</title></head><body>CF Redirected OK?</body></html>\") return container_dir", "wg = WebRequest.WebGetRobust() srv = start_server( assertion_class = None, from_wg", "= is_chromium, is_selenium_garbage_chromium = is_selenium_garbage_chromium, is_annoying_pjs = is_annoying_pjs, skip_header_checks =", "== \"/redirect/from-1\": self.send_response(302) self.send_header('location', \"to-1\") self.end_headers() elif self.path == \"/redirect/to-1\":", "cookie_key = <KEY> log = logging.getLogger(\"Main.TestServer\") sucuri_reqs_1 = 0 sucuri_reqs_2", "2 requests (cookie bounce, and fetch). 
# Doing that requires", "plain_contents = fp.read() self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) ################################################################################################################################## #", "self.send_header('uri', \"to-2\") self.end_headers() elif self.path == \"/redirect/to-2\": self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-2\")", "from http.server import BaseHTTPRequestHandler from http.server import HTTPServer from threading", "self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/redirect/bad-1\":", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path == \"/html/real\":", "= val.split(\" \")[-1] passstr = base64.b64decode(passval) if passstr == b'lol:<PASSWORD>':", "False, is_annoying_pjs = False, skip_header_checks = False ): # Configure", "self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename/path-only-trailing-slash/\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path", "waf circumvented OK (p3)?</body></html>\") return container_dir = os.path.dirname(__file__) fpath =", "cook['cookie_test_key']['domain'] = \"\" expiration = datetime.datetime.now() + datetime.timedelta(days=30) cook['cookie_test_key'][\"expires\"] =", "self.end_headers() self.wfile.write(b\"<html><head><title>At target Sucuri page!</title></head><body>Sucuri Redirected OK?</body></html>\") return container_dir =", "time import datetime from http import cookies from http.server import", "import socket import logging import os import base64 import zlib", "title. 
Sadface.jpg</body></html>\") elif self.path == \"/binary_ctnt\": self.send_response(200) self.send_header('Content-type', \"image/jpeg\") self.end_headers()", "mock_server, mock_server_thread if __name__ == '__main__': wg = WebRequest.WebGetRobust() srv", "self.wfile.write(b\"Redirect-To-2\") elif self.path == \"/redirect/from-3\": self.send_response(302) newurl = \"http://{}:{}\".format(self.server.server_address[0], self.server.server_address[1])", "RuntimeError(\"Too many requests to sucuri_shit_2 (%s)!\" % sucuri_reqs_2) if self.headers.get_all('Cookie',", "cookies.SimpleCookie() cook['cloudflare_validate_key'] = cookie_key cook['cloudflare_validate_key']['path'] = \"/\" cook['cloudflare_validate_key']['domain'] = \"\"", "(p3)?</body></html>\") return container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\", 'sucuri_garbage.html')", "= \"/\" cook['cloudflare_validate_key']['domain'] = \"\" expiration = datetime.datetime.now() + datetime.timedelta(days=30)", "is garbage\" self.send_response(503) self.send_header('Server', \"cloudflare is garbage\") self.send_header('Content-type','text/html') self.end_headers() self.wfile.write(plain_contents)", "self.send_header('Content-type', \"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") ################################################################################################################################## # Cookie stuff ################################################################################################################################## elif", "override the user-agent # and other assorted parameters via their", "nonlocal sucuri_reqs_2 sucuri_reqs_2 += 1 if sucuri_reqs_2 > 1: raise", "# print(\"Path: \", self.path) # print(\"Headers: \", self.headers) # print(\"Cookie(s):", "key, value in expected_headers.items(): if (is_annoying_pjs or is_selenium_garbage_chromium or skip_header_checks)", "and cook_value == '<KEY>': # if 
cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\")", "# Configure mock server. if port_override: mock_server_port = port_override else:", "As such, we expect one request only nonlocal sucuri_reqs_2 sucuri_reqs_2", "elif self.path == \"/favicon.ico\": self.send_response(404) self.end_headers() elif self.path == \"/raw-txt\":", "is_annoying_pjs, skip_header_checks = skip_header_checks ) retries = 4 for x", "garbage\") self.send_header('Content-type','text/html') self.end_headers() self.wfile.write(plain_contents) elif self.path == '/cdn-cgi/l/chk_jschl?jschl_vc=427c2b1cd4fba29608ee81b200e94bfa&pass=<PASSWORD>&jschl_answer=<PASSWORD>': cook =", "}, ) ) def _get_handler(self): # Process an HTTP GET", "sucuri_reqs_2 > 1: raise RuntimeError(\"Too many requests to sucuri_shit_2 (%s)!\"", "import time import datetime from http import cookies from http.server", "self.wfile.write(b'{\"oh\" : \"hai\"}') elif self.path == \"/json/no-coding\": self.send_response(200) self.end_headers() self.wfile.write(b'{\"oh\"", "def capture_expected_headers(expected_headers, test_context, is_chromium=False, is_selenium_garbage_chromium=False, is_annoying_pjs=False, skip_header_checks=False): # print(\"Capturing expected", "handler!\") for line in traceback.format_exc().split(\"\\n\"): log.error(line) raise e return MockServerRequestHandler", "self.validate_headers() except Exception: self.send_response(500) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Headers failed validation!\")", "raise e return MockServerRequestHandler def get_free_port(): s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)", "if cook_key == 'cloudflare_validate_key' and cook_value == cookie_key: # if", "== 'Accept-Encoding': # So PhantomJS monkeys with accept-encoding headers #", "self.end_headers() self.wfile.write(b\"<html><head></head><body>This page has no title. 
Sadface.jpg</body></html>\") elif self.path ==", "\"/redirect/bad-3\": self.send_response(302) self.send_header('location', \"gopher://www.google.com\") self.end_headers() elif self.path == \"/redirect/from-1\": self.send_response(302)", "key, value, self.headers[key], { 'is_annoying_pjs' : is_annoying_pjs, 'is_chromium' : is_chromium,", "many requests to sucuri_shit_3 (%s)!\" % sucuri_reqs_3) if self.headers.get_all('Cookie', failobj=[]):", "page!</title></head><body>Sucuri Redirected OK?</body></html>\") return container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir,", "elif self.path == \"/redirect/to-2\": self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-2\") elif self.path ==", "elif self.path == \"/filename/path-only.txt\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path ==", "self.path == \"/content/have-title\": self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head><title>I can haz title?</title></head><body>This page", "in expected_headers.items(): if (is_annoying_pjs or is_selenium_garbage_chromium or skip_header_checks) and key", "elif self.path == \"/filename/path-only-trailing-slash/\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path ==", "assert isinstance(key, str) assert isinstance(val, str) cookie_key = <KEY> log", "log_message(self, format, *args): return def validate_headers(self): for key, value in", "base64 import zlib import gzip import time import datetime from", "is_selenium_garbage_chromium=False, is_annoying_pjs=False, skip_header_checks=False): # print(\"Capturing expected headers:\") # print(expected_headers) assert", "and I can't override the user-agent # and other assorted", "self.headers) # print(\"Cookie(s): \", self.headers.get_all('Cookie', failobj=[])) try: return self._get_handler() except", "exits. 
mock_server_thread = Thread(target=mock_server.serve_forever) mock_server_thread.setDaemon(True) mock_server_thread.start() return mock_server_port, mock_server, mock_server_thread", "no title. Sadface.jpg</body></html>\") elif self.path == \"/binary_ctnt\": self.send_response(200) self.send_header('Content-type', \"image/jpeg\")", "cook.split(\"=\", 1) if cook_key == 'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '04cbb56494ebedbcd19a61b2d728c478':", "self.end_headers() self.wfile.write(b\"<html><head><title>At target CF page!</title></head><body>CF Redirected OK?</body></html>\") return container_dir =", "self.end_headers() body = \"<html><body>Setting cookies.<script>window.location.href='/cloudflare_under_attack_shit'</script></body></html>\" self.wfile.write(body.encode(\"utf-8\")) ################################################################################################################################## # Handle requests", "cook = cookies.SimpleCookie() cook['cookie_test_key'] = cookie_key cook['cookie_test_key']['path'] = \"/\" cook['cookie_test_key']['domain']", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Headers failed validation!\") raise if self.path ==", "(%s)!\" % sucuri_reqs_3) if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0]", "self.send_header('Content-Disposition', \"filename='lolercoaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-2\": self.send_response(200) self.send_header('Content-Disposition',", "elif self.path == \"/filename_mime/explicit-html-mime\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.send_header('Content-type', \"text/html\") self.end_headers()", "type(expected_headers) for key, val in expected_headers.items(): assert isinstance(key, str) assert", "nonlocal sucuri_reqs_3 sucuri_reqs_3 += 1 if 
sucuri_reqs_3 > 3: raise", "retries: raise # Start running mock server in a separate", "requests (cookie bounce, and fetch). # Doing that requires pulling", "None, from_wg = wg, skip_header_checks = True) print(\"running server on", "\"cloudflare is garbage\" self.send_response(503) self.send_header('Server', \"cloudflare is garbage\") self.send_header('Content-type','text/html') self.end_headers()", "elif self.path == \"/filename_mime/content-disposition-quotes-1\": self.send_response(200) self.send_header('Content-Disposition', \"filename='lolercoaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif", "key == 'Accept-Encoding': # So PhantomJS monkeys with accept-encoding headers", "and return a response with an HTTP 200 status. #", "== cookie_key: self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie forwarded properly!</body></html>\") return", "self.wfile.write(b\"Root OK?\") elif self.path == \"/html/real\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers()", "= assertion_class, is_chromium = is_chromium, is_selenium_garbage_chromium = is_selenium_garbage_chromium, is_annoying_pjs =", "Exception as e: log.error(\"Exception in handler!\") for line in traceback.format_exc().split(\"\\n\"):", "# Selenium is fucking retarded, and I can't override the", "elif self.path == \"/binary_ctnt\": self.send_response(200) self.send_header('Content-type', \"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") elif", "self.end_headers() self.wfile.write(plain_contents) elif self.path == '/cloudflare_under_attack_shit': if self.headers.get_all('Cookie', failobj=[]): cook", "if __name__ == '__main__': wg = WebRequest.WebGetRobust() srv = start_server(", "must be a dict. 
Passed a %s\" & type(expected_headers) for", "self.send_header('location', \"gopher://www.google.com\") self.end_headers() elif self.path == \"/redirect/from-1\": self.send_response(302) self.send_header('location', \"to-1\")", "parameter '{}' : '{}' -> '{}' ({})\".format( key, value, self.headers[key],", "self.wfile.write(b\"<html><head><title>At target preemptive Sucuri page!</title></head><body>Preemptive waf circumvented OK (p3)?</body></html>\") return", "print(from_wg) print(expected_headers) assert isinstance(expected_headers, dict) captured_server = capture_expected_headers( expected_headers =", "srv = start_server( assertion_class = None, from_wg = wg, skip_header_checks", "'04cbb56494ebedbcd19a61b2d728c478': # if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target", "main process exits. mock_server_thread = Thread(target=mock_server.serve_forever) mock_server_thread.setDaemon(True) mock_server_thread.start() return mock_server_port,", "sucuri_reqs_3 += 1 if sucuri_reqs_3 > 3: raise RuntimeError(\"Too many", ">= retries: raise # Start running mock server in a", "\"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") ################################################################################################################################## # Cookie stuff ################################################################################################################################## elif self.path", "\"<html><body>Setting cookies.<script>window.location.href='/cloudflare_under_attack_shit'</script></body></html>\" self.wfile.write(body.encode(\"utf-8\")) ################################################################################################################################## # Handle requests for an unknown", "as e: log.error(\"Exception in handler!\") for line in traceback.format_exc().split(\"\\n\"): 
log.error(line)", "elif self.path == \"/filename_mime/path-only.txt\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path ==", "in self.headers: self.wfile.write(b\"Password not sent!!\") return val = self.headers['Authorization'] passval", "self.send_header('Content-Disposition', \"filename='loler coaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-spaces-2\": self.send_response(200)", "time.sleep(0.2) if x >= retries: raise # Start running mock", "self.send_response(200) self.send_header('Content-type', \"text/plain\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path == \"/html-decode\":", "nonlocal sucuri_reqs_1 sucuri_reqs_1 += 1 if sucuri_reqs_1 > 4: raise", "\", \"\") test_context.assertEqual(v1, v2, msg=\"Mismatch in header parameter '{}' :", "> 4: raise RuntimeError(\"Too many requests to sucuri_shit (%s)!\" %", "\", self.headers.get_all('Cookie', failobj=[])) try: self.validate_headers() except Exception: self.send_response(500) self.send_header('Content-type', \"text/html\")", "# print(self.headers) self.send_response(200) self.end_headers() if not 'Authorization' in self.headers: self.wfile.write(b\"Password", "a %s\" & type(expected_headers) for key, val in expected_headers.items(): assert", "\"/redirect/to-2\": self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-2\") elif self.path == \"/redirect/from-3\": self.send_response(302) newurl", "'/sucuri_shit_2': # This particular path is the one we should", "== \"/\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path", "self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key, cook_value = cook.split(\"=\",", "= \"\" expiration = datetime.datetime.now() + datetime.timedelta(days=30) 
cook['cloudflare_validate_key'][\"expires\"] = expiration.strftime(\"%a,", "\")[-1] passstr = base64.b64decode(passval) if passstr == b'lol:<PASSWORD>': self.wfile.write(b\"Password Ok?\")", "self.path == '/cookie_require': if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0]", "though. # Annoying. nonlocal sucuri_reqs_3 sucuri_reqs_3 += 1 if sucuri_reqs_3", "expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S PST\") self.send_response(200) self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie', cook['cookie_test_key'].OutputString()) self.end_headers()", "cook.split(\"=\", 1) if cook_key == 'cloudflare_validate_key' and cook_value == cookie_key:", "self.send_header('Content-Disposition', \"filename=\\'lolercoaster.html\\'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-spaces-1\": self.send_response(200) self.send_header('Content-Disposition',", "print(\"Cookie(s): \", self.headers.get_all('Cookie', failobj=[])) try: self.validate_headers() except Exception: self.send_response(500) self.send_header('Content-type',", "skip_header_checks = skip_header_checks ) retries = 4 for x in", "> 1: raise RuntimeError(\"Too many requests to sucuri_shit_2 (%s)!\" %", "mock_server_port = port_override else: mock_server_port = get_free_port() expected_headers = dict(from_wg.browserHeaders)", "\"expected_headers must be a dict. 
Passed a %s\" & type(expected_headers)", "self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/explicit-html-mime\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.send_header('Content-type',", "self.server_version = \"cloudflare is garbage\" self.send_response(503) self.send_header('Server', \"cloudflare is garbage\")", "address, port = s.getsockname() s.close() return port def start_server(assertion_class, from_wg,", "can haz title?</title></head><body>This page has a title!</body></html>\") elif self.path ==", ": skip_header_checks, }, ) ) def _get_handler(self): # Process an", "= cook.split(\"=\", 1) if cook_key == 'sucuri_cloudproxy_uuid_6293e0004' and cook_value ==", "preemptive Sucuri page!</title></head><body>Preemptive waf circumvented OK (p2)?</body></html>\") return container_dir =", "self.path == \"/compressed/gzip\": self.send_response(200) self.send_header('Content-Encoding', 'gzip') self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(gzip.compress(b\"Root", "cookie_key cook['cookie_test_key']['path'] = \"/\" cook['cookie_test_key']['domain'] = \"\" expiration = datetime.datetime.now()", "= os.path.join(container_dir, \"waf_garbage\", 'cf_js_challenge_03_12_2018.html') with open(fpath, \"rb\") as fp: plain_contents", "'is_annoying_pjs' : is_annoying_pjs, 'is_chromium' : is_chromium, 'is_selenium_garbage_chromium' : is_selenium_garbage_chromium, 'skip_header_checks'", "\"filename=\\'lolercoaster.html\\'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-spaces-1\": self.send_response(200) self.send_header('Content-Disposition', \"filename='loler", "such, we expect one request only nonlocal sucuri_reqs_2 sucuri_reqs_2 +=", "self.path == '/cloudflare_under_attack_shit': if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0]", "is 
the one we should already have a cookie for.", "self.path == \"/json/invalid\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT\") elif self.path", "Cookie Test</body></html>\") elif self.path == '/cookie_require': if self.headers.get_all('Cookie', failobj=[]): cook", "http.server import HTTPServer from threading import Thread import WebRequest def", "self.end_headers() elif self.path == \"/password/expect\": # print(\"Password\") # print(self.headers) self.send_response(200)", "(is_annoying_pjs or is_chromium or is_selenium_garbage_chromium or skip_header_checks) and key ==", "captured_server) break except OSError: time.sleep(0.2) if x >= retries: raise", "I'd like to get this down to just 2 requests", "particular header, I guess. pass # Selenium is fucking retarded,", "datetime.timedelta(days=30) cook['cookie_test_key'][\"expires\"] = expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S PST\") self.send_response(200) self.send_header('Content-type', \"text/html\")", "I can't override the user-agent # and other assorted parameters", "skip_header_checks, }, ) ) def _get_handler(self): # Process an HTTP", "self.path == \"/filename_mime/content-disposition-quotes-spaces-1\": self.send_response(200) self.send_header('Content-Disposition', \"filename='loler coaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif", "self.send_header('Content-type','text/html') self.end_headers() self.wfile.write(plain_contents) elif self.path == '/cloudflare_under_attack_shit': if self.headers.get_all('Cookie', failobj=[]):", "failed validation!\") raise if self.path == \"/\": self.send_response(200) self.send_header('Content-type', \"text/html\")", "= is_selenium_garbage_chromium, is_annoying_pjs = is_annoying_pjs, skip_header_checks = skip_header_checks ) retries", "\"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") elif self.path == \"/binary_ctnt\": self.send_response(200) 
self.send_header('Content-type', \"image/jpeg\")", "break except OSError: time.sleep(0.2) if x >= retries: raise #", "content out of chromium, though. # Annoying. nonlocal sucuri_reqs_3 sucuri_reqs_3", "and fetch). # Doing that requires pulling html content out", "self.end_headers() self.wfile.write(b\"<html><body>Cookie forwarded properly!</body></html>\") return self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie", "self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-html-suffix\": self.send_response(200)", "self.path == \"/binary_ctnt\": self.send_response(200) self.send_header('Content-type', \"image/jpeg\") self.end_headers() self.wfile.write(b\"Binary!\\x00\\x01\\x02\\x03\") ################################################################################################################################## #", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(plain_contents) elif self.path == '/sucuri_shit': nonlocal", "with an HTTP 200 status. 
# print(\"Path: \", self.path) #", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path == \"/html/real\": self.send_response(200)", "cook['cloudflare_validate_key']['domain'] = \"\" expiration = datetime.datetime.now() + datetime.timedelta(days=30) cook['cloudflare_validate_key'][\"expires\"] =", "monkeys with accept-encoding headers # Just ignore that particular header,", "\"text/html\") self.send_header('Set-Cookie', cook['cloudflare_validate_key'].OutputString()) self.end_headers() body = \"<html><body>Setting cookies.<script>window.location.href='/cloudflare_under_attack_shit'</script></body></html>\" self.wfile.write(body.encode(\"utf-8\")) ##################################################################################################################################", "self.send_response(200) self.send_header('Content-Disposition', \"filename=\\\"loler coaster.html\\\"\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/explicit-html-mime\":", "else: test_context.assertEqual(self.path, \"This shouldn't happen!\") def do_GET(self): # Process an", "in traceback.format_exc().split(\"\\n\"): log.error(line) raise e return MockServerRequestHandler def get_free_port(): s", "parameters via their API at all. 
elif (is_selenium_garbage_chromium or skip_header_checks)", "if port_override: mock_server_port = port_override else: mock_server_port = get_free_port() expected_headers", "# if cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target preemptive", "Sucuri page!</title></head><body>Sucuri Redirected OK?</body></html>\") return container_dir = os.path.dirname(__file__) fpath =", "self.server.server_address[1]) self.send_header('uri', newurl) self.end_headers() elif self.path == \"/password/expect\": # print(\"Password\")", "\"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-html-suffix\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\")", "http.server import BaseHTTPRequestHandler from http.server import HTTPServer from threading import", ": \"hai\"}') elif self.path == \"/filename/path-only.txt\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif", "self.path) # print(\"Cookies:\", self.headers.get_all('Cookie', failobj=[])) if self.headers.get_all('Cookie', failobj=[]): cook =", "def start_server(assertion_class, from_wg, port_override = None, is_chromium = None, is_selenium_garbage_chromium", "'/sucuri_shit': nonlocal sucuri_reqs_1 sucuri_reqs_1 += 1 if sucuri_reqs_1 > 4:", "if sucuri_reqs_3 > 3: raise RuntimeError(\"Too many requests to sucuri_shit_3", "= is_annoying_pjs, skip_header_checks = skip_header_checks ) retries = 4 for", "== '/cookie_require': if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key,", "key == 'Accept': pass elif not skip_header_checks: v1 = value.replace(\"", "OK?\") elif self.path == \"/html-decode\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Root", "+= 1 if 
sucuri_reqs_1 > 4: raise RuntimeError(\"Too many requests", "self.path == \"/redirect/bad-3\": self.send_response(302) self.send_header('location', \"gopher://www.google.com\") self.end_headers() elif self.path ==", "== \"/redirect/from-3\": self.send_response(302) newurl = \"http://{}:{}\".format(self.server.server_address[0], self.server.server_address[1]) self.send_header('uri', newurl) self.end_headers()", "= \"\" v2 = v2.replace(\" \", \"\") test_context.assertEqual(v1, v2, msg=\"Mismatch", "cookies.<script>window.location.href='/cloudflare_under_attack_shit'</script></body></html>\" self.wfile.write(body.encode(\"utf-8\")) ################################################################################################################################## # Handle requests for an unknown path", "this down to just 2 requests (cookie bounce, and fetch).", "# I'd like to get this down to just 2", "\", self.path) # print(\"Headers: \", self.headers) # print(\"Cookie(s): \", self.headers.get_all('Cookie',", "== \"/content/have-title\": self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head><title>I can haz title?</title></head><body>This page has", "container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\", 'sucuri_garbage.html') with open(fpath,", "the main process exits. 
mock_server_thread = Thread(target=mock_server.serve_forever) mock_server_thread.setDaemon(True) mock_server_thread.start() return", "\"filename=\\\"loler coaster.html\\\"\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/explicit-html-mime\": self.send_response(200) self.send_header('Content-Disposition',", "get_free_port() expected_headers = dict(from_wg.browserHeaders) print(from_wg) print(expected_headers) assert isinstance(expected_headers, dict) captured_server", "3: raise RuntimeError(\"Too many requests to sucuri_shit_3 (%s)!\" % sucuri_reqs_3)", "MockServerRequestHandler(BaseHTTPRequestHandler): def log_message(self, format, *args): return def validate_headers(self): for key,", "= b\"Root OK?\" cobj = zlib.compressobj(wbits=-zlib.MAX_WBITS) t1 = cobj.compress(inb) +", "we expect one request only nonlocal sucuri_reqs_2 sucuri_reqs_2 += 1", "Daemon threads automatically shut down when the main process exits.", "expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S PST\") self.send_response(200) self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie', cook['cloudflare_validate_key'].OutputString()) self.end_headers()", "= v2.replace(\" \", \"\") test_context.assertEqual(v1, v2, msg=\"Mismatch in header parameter", "response with an HTTP 200 status. 
# print(\"Path: \", self.path)", "uuid import socket import logging import os import base64 import", "\"\" expiration = datetime.datetime.now() + datetime.timedelta(days=30) cook['cookie_test_key'][\"expires\"] = expiration.strftime(\"%a, %d-%b-%Y", "isinstance(val, str) cookie_key = <KEY> log = logging.getLogger(\"Main.TestServer\") sucuri_reqs_1 =", "== \"/filename_mime/content-disposition-quotes-2\": self.send_response(200) self.send_header('Content-Disposition', \"filename=\\'lolercoaster.html\\'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path ==", "\"/\" cook['cloudflare_validate_key']['domain'] = \"\" expiration = datetime.datetime.now() + datetime.timedelta(days=30) cook['cloudflare_validate_key'][\"expires\"]", "self.path == \"/password/expect\": # print(\"Password\") # print(self.headers) self.send_response(200) self.end_headers() if", "\"/redirect/from-3\": self.send_response(302) newurl = \"http://{}:{}\".format(self.server.server_address[0], self.server.server_address[1]) self.send_header('uri', newurl) self.end_headers() elif", "self.wfile.write(plain_contents) elif self.path == '/sucuri_shit_2': # This particular path is", "else: mock_server_port = get_free_port() expected_headers = dict(from_wg.browserHeaders) print(from_wg) print(expected_headers) assert", "self.send_header('Content-type','text/html') self.end_headers() self.wfile.write(plain_contents) elif self.path == '/cdn-cgi/l/chk_jschl?jschl_vc=427c2b1cd4fba29608ee81b200e94bfa&pass=<PASSWORD>&jschl_answer=<PASSWORD>': cook = cookies.SimpleCookie()", "self.headers[key] if v2 is None: v2 = \"\" v2 =", "is_chromium = None, is_selenium_garbage_chromium = False, is_annoying_pjs = False, skip_header_checks", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT\") elif self.path == \"/json/valid\": self.send_response(200) self.send_header('Content-type',", "dict), \"expected_headers must be a dict. 
Passed a %s\" &", "== 'cookie_test_key' and cook_value == cookie_key: self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers()", "request and return a response with an HTTP 200 status.", "to get this down to just 2 requests (cookie bounce,", "Thread import WebRequest def capture_expected_headers(expected_headers, test_context, is_chromium=False, is_selenium_garbage_chromium=False, is_annoying_pjs=False, skip_header_checks=False):", "'__main__': wg = WebRequest.WebGetRobust() srv = start_server( assertion_class = None,", "= cook.split(\"=\", 1) if cook_key == 'cookie_test_key' and cook_value ==", "get_free_port(): s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM) s.bind(('localhost', 0)) address, port =", "0 sucuri_reqs_2 = 0 sucuri_reqs_3 = 0 class MockServerRequestHandler(BaseHTTPRequestHandler): def", "cook['cloudflare_validate_key'].OutputString()) self.end_headers() body = \"<html><body>Setting cookies.<script>window.location.href='/cloudflare_under_attack_shit'</script></body></html>\" self.wfile.write(body.encode(\"utf-8\")) ################################################################################################################################## # Handle", "format, *args): return def validate_headers(self): for key, value in expected_headers.items():", "accept-encoding headers # Just ignore that particular header, I guess.", "skip_header_checks=False): # print(\"Capturing expected headers:\") # print(expected_headers) assert isinstance(expected_headers, dict),", "datetime.datetime.now() + datetime.timedelta(days=30) cook['cookie_test_key'][\"expires\"] = expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S PST\") self.send_response(200)", "guess. 
pass # Selenium is fucking retarded, and I can't", "self.end_headers() self.wfile.write(b\"<html><body>CF Cookie Test</body></html>\") elif self.path == '/cookie_require': if self.headers.get_all('Cookie',", "self.end_headers() self.wfile.write(b\"Redirect-To-1\") elif self.path == \"/redirect/from-2\": self.send_response(302) self.send_header('uri', \"to-2\") self.end_headers()", "coaster.html\\\"\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/explicit-html-mime\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\")", "= \"<html><body>Setting cookies.<script>window.location.href='/cloudflare_under_attack_shit'</script></body></html>\" self.wfile.write(body.encode(\"utf-8\")) ################################################################################################################################## # Handle requests for an", "test_context.assertEqual(self.path, \"This shouldn't happen!\") def do_GET(self): # Process an HTTP", "response with an HTTP 200 status. log.info(\"Request for URL path:", "self.wfile.write(b\"<html><head></head><body>This page has no title. Sadface.jpg</body></html>\") elif self.path == \"/binary_ctnt\":", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie forwarded properly!</body></html>\") return self.send_response(200) self.send_header('Content-type',", "\", self.headers.get_all('Cookie', failobj=[])) try: return self._get_handler() except Exception as e:", "\"/filename_mime/content-disposition-quotes-2\": self.send_response(200) self.send_header('Content-Disposition', \"filename=\\'lolercoaster.html\\'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-spaces-1\":", "shut down when the main process exits. 
mock_server_thread = Thread(target=mock_server.serve_forever)", "s.close() return port def start_server(assertion_class, from_wg, port_override = None, is_chromium", "sucuri_reqs_2 = 0 sucuri_reqs_3 = 0 class MockServerRequestHandler(BaseHTTPRequestHandler): def log_message(self,", "Test</body></html>\") elif self.path == '/cookie_require': if self.headers.get_all('Cookie', failobj=[]): cook =", "\"text/html\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path == \"/html/real\": self.send_response(200) self.send_header('Content-type',", "# This particular path is the one we should already", "elif self.path == \"/raw-txt\": self.send_response(200) self.send_header('Content-type', \"text/plain\") self.end_headers() self.wfile.write(b\"Root OK?\")", "1 if sucuri_reqs_1 > 4: raise RuntimeError(\"Too many requests to", "self.path == \"/redirect/from-2\": self.send_response(302) self.send_header('uri', \"to-2\") self.end_headers() elif self.path ==", "-> '{}' ({})\".format( key, value, self.headers[key], { 'is_annoying_pjs' : is_annoying_pjs,", "self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie', cook['cloudflare_validate_key'].OutputString()) self.end_headers() body = \"<html><body>Setting cookies.<script>window.location.href='/cloudflare_under_attack_shit'</script></body></html>\" self.wfile.write(body.encode(\"utf-8\"))", "import HTTPServer from threading import Thread import WebRequest def capture_expected_headers(expected_headers,", "\"cloudflare is garbage\") self.send_header('Content-type','text/html') self.end_headers() self.wfile.write(plain_contents) elif self.path == '/cloudflare_under_attack_shit':", "not 'Authorization' in self.headers: self.wfile.write(b\"Password not sent!!\") return val =", "elif self.path == \"/redirect/from-1\": self.send_response(302) self.send_header('location', \"to-1\") self.end_headers() elif self.path", "status. 
log.info(\"Request for URL path: '%s'\", self.path) # print(\"Headers: \",", "sucuri_shit_2 (%s)!\" % sucuri_reqs_2) if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie',", "open(fpath, \"rb\") as fp: plain_contents = fp.read() self.server_version = \"cloudflare", "cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target preemptive Sucuri page!</title></head><body>Preemptive", "out of chromium, though. # Annoying. nonlocal sucuri_reqs_3 sucuri_reqs_3 +=", "= self.headers.get_all('Cookie', failobj=[])[0] cook_key, cook_value = cook.split(\"=\", 1) if cook_key", "we should already have a cookie for. # As such,", "target preemptive Sucuri page!</title></head><body>Preemptive waf circumvented OK (p3)?</body></html>\") return container_dir", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Root OK?</body></html>\") elif self.path == \"/compressed/deflate\": self.send_response(200)", "return self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie is missing</body></html>\") ################################################################################################################################## #", "circumvented OK (p2)?</body></html>\") return container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir,", "msg=\"Mismatch in header parameter '{}' : '{}' -> '{}' ({})\".format(", "== \"/content/no-title\": self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head></head><body>This page has no title. 
Sadface.jpg</body></html>\")", "self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-spaces-2\": self.send_response(200) self.send_header('Content-Disposition', \"filename=\\\"loler coaster.html\\\"\") self.end_headers()", "################################################################################################################################## else: test_context.assertEqual(self.path, \"This shouldn't happen!\") def do_GET(self): # Process", "preemptive Sucuri page!</title></head><body>Preemptive waf circumvented OK (p3)?</body></html>\") return container_dir =", "URL path: '%s'\", self.path) # print(\"Headers: \", self.headers) # print(\"Cookie(s):", ": is_selenium_garbage_chromium, 'skip_header_checks' : skip_header_checks, }, ) ) def _get_handler(self):", "elif self.path == '/cookie_test': cook = cookies.SimpleCookie() cook['cookie_test_key'] = cookie_key", "os.path.join(container_dir, \"waf_garbage\", 'sucuri_garbage.html') with open(fpath, \"rb\") as fp: plain_contents =", "in header parameter '{}' : '{}' -> '{}' ({})\".format( key,", "test_context = assertion_class, is_chromium = is_chromium, is_selenium_garbage_chromium = is_selenium_garbage_chromium, is_annoying_pjs", "self.send_response(302) self.end_headers() elif self.path == \"/redirect/bad-2\": self.send_response(302) self.send_header('location', \"bad-2\") self.end_headers()", "cook_key == 'cookie_test_key' and cook_value == cookie_key: self.send_response(200) self.send_header('Content-type', \"text/html\")", "# print(\"Fetch for \", self.path) # print(\"Cookies:\", self.headers.get_all('Cookie', failobj=[])) if", "== \"/redirect/to-2\": self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-2\") elif self.path == \"/redirect/from-3\": self.send_response(302)", "\"hai\"}') elif self.path == \"/filename/path-only.txt\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path", "v2 is None: v2 = \"\" v2 
= v2.replace(\" \",", "a separate thread. # Daemon threads automatically shut down when", "self.send_response(200) self.end_headers() if not 'Authorization' in self.headers: self.wfile.write(b\"Password not sent!!\")", "self.path == \"/html/real\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Root OK?</body></html>\") elif", "self.send_response(200) self.send_header('Content-Encoding', 'gzip') self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(gzip.compress(b\"Root OK?\")) elif self.path", "val in expected_headers.items(): assert isinstance(key, str) assert isinstance(val, str) cookie_key", "self.path == \"/filename_mime/content-disposition\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path", "with accept-encoding headers # Just ignore that particular header, I", "self.wfile.write(b\"Password Bad!\") elif self.path == \"/content/have-title\": self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head><title>I can", "page!</title></head><body>Preemptive waf circumvented OK (p2)?</body></html>\") return container_dir = os.path.dirname(__file__) fpath", "= 0 sucuri_reqs_3 = 0 class MockServerRequestHandler(BaseHTTPRequestHandler): def log_message(self, format,", "self.path == '/cookie_test': cook = cookies.SimpleCookie() cook['cookie_test_key'] = cookie_key cook['cookie_test_key']['path']", "self.path == '/sucuri_shit_3': # I'd like to get this down", "'deflate') self.send_header('Content-type', \"text/html\") self.end_headers() inb = b\"Root OK?\" cobj =", "Configure mock server. 
if port_override: mock_server_port = port_override else: mock_server_port", "\"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie is missing</body></html>\") ################################################################################################################################## # Sucuri validation ##################################################################################################################################", "body = \"<html><body>Setting cookies.<script>window.location.href='/cloudflare_under_attack_shit'</script></body></html>\" self.wfile.write(body.encode(\"utf-8\")) ################################################################################################################################## # Handle requests for", "1) if cook_key == 'cookie_test_key' and cook_value == cookie_key: self.send_response(200)", "self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b'{\"oh\" : \"hai\"}') elif self.path ==", "str) assert isinstance(val, str) cookie_key = <KEY> log = logging.getLogger(\"Main.TestServer\")", "\"/content/no-title\": self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head></head><body>This page has no title. Sadface.jpg</body></html>\") elif", "CF page!</title></head><body>CF Redirected OK?</body></html>\") return container_dir = os.path.dirname(__file__) fpath =", "Selenium is fucking retarded, and I can't override the user-agent", "cook_value = cook.split(\"=\", 1) if cook_key == 'cloudflare_validate_key' and cook_value", "# Just ignore that particular header, I guess. 
pass #", "################################################################################################################################## # Cookie stuff ################################################################################################################################## elif self.path == '/cookie_test': cook", "assorted parameters via their API at all. elif (is_selenium_garbage_chromium or", "sucuri_shit (%s)!\" % sucuri_reqs_1) # print(\"Fetch for \", self.path) #", "So PhantomJS monkeys with accept-encoding headers # Just ignore that", "\"cloudflare is garbage\") self.send_header('Content-type','text/html') self.end_headers() self.wfile.write(plain_contents) elif self.path == '/cdn-cgi/l/chk_jschl?jschl_vc=427c2b1cd4fba29608ee81b200e94bfa&pass=<PASSWORD>&jschl_answer=<PASSWORD>':", "is_selenium_garbage_chromium, 'skip_header_checks' : skip_header_checks, }, ) ) def _get_handler(self): #", "self.send_response(200) self.end_headers() self.wfile.write(b\"Redirect-To-1\") elif self.path == \"/redirect/from-2\": self.send_response(302) self.send_header('uri', \"to-2\")", "if not 'Authorization' in self.headers: self.wfile.write(b\"Password not sent!!\") return val", "elif self.path == \"/filename/content-disposition\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif", "captured_server = capture_expected_headers( expected_headers = expected_headers, test_context = assertion_class, is_chromium", "def get_free_port(): s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM) s.bind(('localhost', 0)) address, port", "from_wg = wg, skip_header_checks = True) print(\"running server on port:", "\"http://{}:{}\".format(self.server.server_address[0], self.server.server_address[1]) self.send_header('uri', newurl) self.end_headers() elif self.path == \"/password/expect\": #", "is_chromium, is_selenium_garbage_chromium = is_selenium_garbage_chromium, 
is_annoying_pjs = is_annoying_pjs, skip_header_checks = skip_header_checks", "200 status. log.info(\"Request for URL path: '%s'\", self.path) # print(\"Headers:", "= cookies.SimpleCookie() cook['cloudflare_validate_key'] = cookie_key cook['cloudflare_validate_key']['path'] = \"/\" cook['cloudflare_validate_key']['domain'] =", "self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path == \"/html/real\": self.send_response(200) self.send_header('Content-type', \"text/html\")", "self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-1\": self.send_response(200)", "self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head><title>I can haz title?</title></head><body>This page has a title!</body></html>\")", "1 if sucuri_reqs_2 > 1: raise RuntimeError(\"Too many requests to", "mock_server_port), captured_server) break except OSError: time.sleep(0.2) if x >= retries:", "'{}' ({})\".format( key, value, self.headers[key], { 'is_annoying_pjs' : is_annoying_pjs, 'is_chromium'", "failobj=[])) try: return self._get_handler() except Exception as e: log.error(\"Exception in", "that particular header, I guess. 
pass # Selenium is fucking", "1: raise RuntimeError(\"Too many requests to sucuri_shit_2 (%s)!\" % sucuri_reqs_2)", "sucuri_reqs_1 += 1 if sucuri_reqs_1 > 4: raise RuntimeError(\"Too many", "coaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition-quotes-spaces-2\": self.send_response(200) self.send_header('Content-Disposition', \"filename=\\\"loler", "'%s'\", self.path) # print(\"Headers: \", self.headers) # print(\"Cookie(s): \", self.headers.get_all('Cookie',", "'/cookie_require': if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key, cook_value", "assert isinstance(expected_headers, dict) captured_server = capture_expected_headers( expected_headers = expected_headers, test_context", "= WebRequest.WebGetRobust() srv = start_server( assertion_class = None, from_wg =", "import Thread import WebRequest def capture_expected_headers(expected_headers, test_context, is_chromium=False, is_selenium_garbage_chromium=False, is_annoying_pjs=False,", "Start running mock server in a separate thread. 
# Daemon", "try: mock_server = HTTPServer(('0.0.0.0', mock_server_port), captured_server) break except OSError: time.sleep(0.2)", "pass elif not skip_header_checks: v1 = value.replace(\" \", \"\") v2", "except Exception as e: log.error(\"Exception in handler!\") for line in", "%d-%b-%Y %H:%M:%S PST\") self.send_response(200) self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie', cook['cookie_test_key'].OutputString()) self.end_headers() self.wfile.write(b\"<html><body>CF", "sucuri_reqs_1 > 4: raise RuntimeError(\"Too many requests to sucuri_shit (%s)!\"", "elif self.path == '/sucuri_shit': nonlocal sucuri_reqs_1 sucuri_reqs_1 += 1 if", "self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/explicit-html-mime\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.html\") self.send_header('Content-type', \"text/html\")", "has no title. Sadface.jpg</body></html>\") elif self.path == \"/binary_ctnt\": self.send_response(200) self.send_header('Content-type',", "cook['cookie_test_key'].OutputString()) self.end_headers() self.wfile.write(b\"<html><body>CF Cookie Test</body></html>\") elif self.path == '/cookie_require': if", "self.wfile.write(b\"LOLWAT?\") elif self.path == \"/redirect/bad-1\": self.send_response(302) self.end_headers() elif self.path ==", "= 4 for x in range(retries + 1): try: mock_server", "self.path == \"/redirect/from-1\": self.send_response(302) self.send_header('location', \"to-1\") self.end_headers() elif self.path ==", "the one we should already have a cookie for. #", "False, skip_header_checks = False ): # Configure mock server. 
if", "if sucuri_reqs_2 > 1: raise RuntimeError(\"Too many requests to sucuri_shit_2", "Sucuri page!</title></head><body>Preemptive waf circumvented OK (p3)?</body></html>\") return container_dir = os.path.dirname(__file__)", "self.path == \"/filename/content-disposition\": self.send_response(200) self.send_header('Content-Disposition', \"filename=lolercoaster.txt\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path", "to sucuri_shit_3 (%s)!\" % sucuri_reqs_3) if self.headers.get_all('Cookie', failobj=[]): cook =", "shouldn't happen!\") def do_GET(self): # Process an HTTP GET request", "elif self.path == \"/redirect/from-3\": self.send_response(302) newurl = \"http://{}:{}\".format(self.server.server_address[0], self.server.server_address[1]) self.send_header('uri',", "is missing</body></html>\") ################################################################################################################################## # Sucuri validation ################################################################################################################################## elif self.path ==", "\"/compressed/gzip\": self.send_response(200) self.send_header('Content-Encoding', 'gzip') self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(gzip.compress(b\"Root OK?\")) elif", "\"text/html\") self.end_headers() self.wfile.write(b'{\"oh\" : \"hai\"}') elif self.path == \"/json/no-coding\": self.send_response(200)", "skip_header_checks) and key == 'Accept-Encoding': # So PhantomJS monkeys with", "= os.path.join(container_dir, \"waf_garbage\", 'sucuri_garbage.html') with open(fpath, \"rb\") as fp: plain_contents", "\"/filename_mime/content-disposition-quotes-spaces-1\": self.send_response(200) self.send_header('Content-Disposition', \"filename='loler coaster.html'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path ==", "= port_override else: mock_server_port = get_free_port() expected_headers = 
dict(from_wg.browserHeaders) print(from_wg)", "'/sucuri_shit_3': # I'd like to get this down to just", "# Cloudflare validation ################################################################################################################################## elif self.path == '/cloudflare_under_attack_shit_2': if self.headers.get_all('Cookie',", "%H:%M:%S PST\") self.send_response(200) self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie', cook['cookie_test_key'].OutputString()) self.end_headers() self.wfile.write(b\"<html><body>CF Cookie", "cook_key == 'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '<KEY>': # if cook['']", "\"/\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"Root OK?\") elif self.path ==", "cookies.SimpleCookie() cook['cookie_test_key'] = cookie_key cook['cookie_test_key']['path'] = \"/\" cook['cookie_test_key']['domain'] = \"\"", "\"/compressed/deflate\": self.send_response(200) self.send_header('Content-Encoding', 'deflate') self.send_header('Content-type', \"text/html\") self.end_headers() inb = b\"Root", "v2 = v2.replace(\" \", \"\") test_context.assertEqual(v1, v2, msg=\"Mismatch in header", "== '/sucuri_shit_3': # I'd like to get this down to", "# As such, we expect one request only nonlocal sucuri_reqs_2", "self.send_header('Set-Cookie', cook['cookie_test_key'].OutputString()) self.end_headers() self.wfile.write(b\"<html><body>CF Cookie Test</body></html>\") elif self.path == '/cookie_require':", "automatically shut down when the main process exits. 
mock_server_thread =", "Bad!\") elif self.path == \"/content/have-title\": self.send_response(200) self.end_headers() self.wfile.write(b\"<html><head><title>I can haz", "page!</title></head><body>Preemptive waf circumvented OK (p3)?</body></html>\") return container_dir = os.path.dirname(__file__) fpath", "print(\"Headers: \", self.headers) # print(\"Cookie(s): \", self.headers.get_all('Cookie', failobj=[])) try: self.validate_headers()", "self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename/path-only-trailing-slash/\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif", "= \"\" expiration = datetime.datetime.now() + datetime.timedelta(days=30) cook['cookie_test_key'][\"expires\"] = expiration.strftime(\"%a,", "self.wfile.write(b'{\"oh\" : \"hai\"}') elif self.path == \"/filename/path-only.txt\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\")", "isinstance(key, str) assert isinstance(val, str) cookie_key = <KEY> log =", "self.end_headers() self.wfile.write(b\"LOLWAT\") elif self.path == \"/json/valid\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers()", "OK (p2)?</body></html>\") return container_dir = os.path.dirname(__file__) fpath = os.path.join(container_dir, \"waf_garbage\",", "not sent!!\") return val = self.headers['Authorization'] passval = val.split(\" \")[-1]", "self.end_headers() self.wfile.write(b\"<html><body>Cookie is missing</body></html>\") ################################################################################################################################## # Sucuri validation ################################################################################################################################## elif", "cook[''] self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><head><title>At target CF page!</title></head><body>CF Redirected", 
"datetime.datetime.now() + datetime.timedelta(days=30) cook['cloudflare_validate_key'][\"expires\"] = expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S PST\") self.send_response(200)", "== 'cloudflare_validate_key' and cook_value == cookie_key: # if cook[''] self.send_response(200)", "== \"/filename_mime/path-only.txt\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename_mime/content-disposition\": self.send_response(200)", "self.wfile.write(b\"<html><head><title>At target Sucuri page!</title></head><body>Sucuri Redirected OK?</body></html>\") return container_dir = os.path.dirname(__file__)", "thread. # Daemon threads automatically shut down when the main", "== '/cloudflare_under_attack_shit_2': if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0] cook_key,", "1) if cook_key == 'sucuri_cloudproxy_uuid_6293e0004' and cook_value == '<KEY>': #", "is_annoying_pjs = False, skip_header_checks = False ): # Configure mock", "if sucuri_reqs_1 > 4: raise RuntimeError(\"Too many requests to sucuri_shit", "elif self.path == \"/json/invalid\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"LOLWAT\") elif", ": '{}' -> '{}' ({})\".format( key, value, self.headers[key], { 'is_annoying_pjs'", "self.path == \"/filename/path-only-trailing-slash/\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename/content-disposition\":", "self.path == \"/filename_mime/content-disposition-quotes-2\": self.send_response(200) self.send_header('Content-Disposition', \"filename=\\'lolercoaster.html\\'\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path", "cook_key == 'cloudflare_validate_key' and cook_value == cookie_key: # if cook['']", "requests for an unknown path 
################################################################################################################################## else: test_context.assertEqual(self.path, \"This shouldn't", "None, is_selenium_garbage_chromium = False, is_annoying_pjs = False, skip_header_checks = False", "self.wfile.write(b\"<html><body>CF Cookie Test</body></html>\") elif self.path == '/cookie_require': if self.headers.get_all('Cookie', failobj=[]):", "mock_server_thread = Thread(target=mock_server.serve_forever) mock_server_thread.setDaemon(True) mock_server_thread.start() return mock_server_port, mock_server, mock_server_thread if", "(%s)!\" % sucuri_reqs_2) if self.headers.get_all('Cookie', failobj=[]): cook = self.headers.get_all('Cookie', failobj=[])[0]", "self.path == \"/json/valid\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b'{\"oh\" : \"hai\"}')", "header parameter '{}' : '{}' -> '{}' ({})\".format( key, value,", "\"waf_garbage\", 'cf_js_challenge_03_12_2018.html') with open(fpath, \"rb\") as fp: plain_contents = fp.read()", "a cookie for. 
# As such, we expect one request", "\", \"\") v2 = self.headers[key] if v2 is None: v2", "self.wfile.write(gzip.compress(b\"Root OK?\")) elif self.path == \"/json/invalid\": self.send_response(200) self.send_header('Content-type', \"text/html\") self.end_headers()", "= expiration.strftime(\"%a, %d-%b-%Y %H:%M:%S PST\") self.send_response(200) self.send_header('Content-type', \"text/html\") self.send_header('Set-Cookie', cook['cookie_test_key'].OutputString())", "try: return self._get_handler() except Exception as e: log.error(\"Exception in handler!\")", "print(\"Password\") # print(self.headers) self.send_response(200) self.end_headers() if not 'Authorization' in self.headers:", "return def validate_headers(self): for key, value in expected_headers.items(): if (is_annoying_pjs", "self.send_header('Content-type', \"text/html\") self.end_headers() self.wfile.write(b\"<html><body>Cookie forwarded properly!</body></html>\") return self.send_response(200) self.send_header('Content-type', \"text/html\")", "skip_header_checks = True) print(\"running server on port: \", srv) while", "== \"/filename/path-only-trailing-slash/\": self.send_response(200) self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path == \"/filename/content-disposition\": self.send_response(200)", "== \"/filename_mime/content-disposition-quotes-spaces-2\": self.send_response(200) self.send_header('Content-Disposition', \"filename=\\\"loler coaster.html\\\"\") self.end_headers() self.wfile.write(b\"LOLWAT?\") elif self.path", "RuntimeError(\"Too many requests to sucuri_shit_3 (%s)!\" % sucuri_reqs_3) if self.headers.get_all('Cookie',", "plain_contents = fp.read() self.server_version = \"cloudflare is garbage\" self.send_response(503) self.send_header('Server',", "can't override the user-agent # and other assorted parameters via", "when the main process exits. 
mock_server_thread = Thread(target=mock_server.serve_forever) mock_server_thread.setDaemon(True) mock_server_thread.start()", "self.end_headers() self.wfile.write(b\"Headers failed validation!\") raise if self.path == \"/\": self.send_response(200)" ]
[ "as np import pandas import scipy.optimize import sys import argparse", "{int(sol[i])}\") i += 1 return 0 def main(): name =", "self.tot_credits = sum(self.owned_credits) + sum(self.credits) def weight_average(self, v): term1 =", "mean, csv): self.df = pandas.read_csv(csv) self.course = self.df['name'] self.expected_mean =", "default='courses.csv', type=str, help='path to the csv file containing the courses", "class WeightAverage: def __init__(self, mean, csv): self.df = pandas.read_csv(csv) self.course", "'fun': ineq_constraint_2}) res = scipy.optimize.minimize(self.weight_average, self.grade_initial_sol, method='SLSQP', constraints=cons) if not", "for i in range(0, len(self.owned_grades)): term1 = term1 + self.owned_grades[i]", "the courses (default: courses.csv)') parser.add_argument('--floor', default=False, action='store_true', help='apply floor operation", "current grades :(\") exit() for index, row in df.query('grade >", "cons = ( {'type': 'eq', 'fun': self.eq_constraint}, {'type': 'ineq', 'fun':", "minimum grades required to get a certain weight average of", "not res.success: return None return res.x def error_no_solution(): print(\"Mean not", "print(f\"Expected mean: {avg} -> {int(round(avg / 30 * 110, 0))}", "given the desired output and the grades already owned.\"\"\" parser", "argparse.ArgumentParser(name, description=description) parser.add_argument('mean', metavar='M', type=float, nargs='+', help='The expected mean') parser.add_argument('--file',dest='file',", "sol is None: error_no_solution() if args.ceil: sol = [math.ceil(x) for", "args.floor: sol = [math.floor(x) for x in sol] else: sol", "return self.weight_average(v) - self.expected_mean def solve(self): cons = ( {'type':", "110, 0))} / 110\") if sol is None: print(\"Not Possible", "= self.df[['grade']].query('grade > 0').transpose().to_numpy()[0] self.tot_credits = sum(self.owned_credits) + sum(self.credits) def", "np.array([mean for _ in range(0, len(self.credits))]) self.owned_credits = 
self.df[['credits', 'grade']].query('grade", "already owned.\"\"\" parser = argparse.ArgumentParser(name, description=description) parser.add_argument('mean', metavar='M', type=float, nargs='+',", "instead of round to solution') parser.add_argument('--ceil', default=False, action='store_true', help='apply ceil", "to solution') parser.add_argument('--ceil', default=False, action='store_true', help='apply ceil operation instead of", "self.df[['grade']].query('grade > 0').transpose().to_numpy()[0] self.tot_credits = sum(self.owned_credits) + sum(self.credits) def weight_average(self,", "import pandas import scipy.optimize import sys import argparse def ineq_constraint_1(v):", "self.df = pandas.read_csv(csv) self.course = self.df['name'] self.expected_mean = mean self.credits", "'ineq', 'fun': ineq_constraint_2}) res = scipy.optimize.minimize(self.weight_average, self.grade_initial_sol, method='SLSQP', constraints=cons) if", "csv import math import numpy as np import pandas import", "self.owned_credits = self.df[['credits', 'grade']].query('grade > 0')[['credits']].transpose().to_numpy()[0] self.owned_grades = self.df[['grade']].query('grade >", "name = \"calcGrades\" description = \"\"\"CalcGrades is an utility which", "v]) class WeightAverage: def __init__(self, mean, csv): self.df = pandas.read_csv(csv)", "sol = solver.solve() if sol is None: error_no_solution() if args.ceil:", "self.df[['credits', 'grade']].query('grade > 0')[['credits']].transpose().to_numpy()[0] self.owned_grades = self.df[['grade']].query('grade > 0').transpose().to_numpy()[0] self.tot_credits", "0 term2 = 0 for i in range(0, len(self.owned_grades)): term1", "0 for index, row in df.query('grade == 0').iterrows(): print(f\"'{row['name']}', credits:", "= term1 + self.owned_grades[i] * self.owned_credits[i] for i in range(0,", "range(0, len(self.credits))]) self.owned_credits = self.df[['credits', 'grade']].query('grade > 0')[['credits']].transpose().to_numpy()[0] self.owned_grades =", "import csv 
import math import numpy as np import pandas", "pandas.read_csv(csv) self.course = self.df['name'] self.expected_mean = mean self.credits = self.df[['credits',", "of round to solution') parser.add_argument('--ceil', default=False, action='store_true', help='apply ceil operation", "in sol] else: sol = [round(x) for x in sol]", "solution') args = parser.parse_args() mean = args.mean courses = args.file", "len(self.owned_grades)): term1 = term1 + self.owned_grades[i] * self.owned_credits[i] for i", "in sol] elif args.floor: sol = [math.floor(x) for x in", "nargs='+', help='The expected mean') parser.add_argument('--file',dest='file', default='courses.csv', type=str, help='path to the", "print(\"Not Possible with current grades :(\") exit() for index, row", "main(): name = \"calcGrades\" description = \"\"\"CalcGrades is an utility", "action='store_true', help='apply ceil operation instead of round to solution') args", "print(f\"'{row['name']}', credits: {row['credits']}, grade {row['grade']}\") i = 0 for index,", "get a certain weight average of the grades over the", ":(\") exit() for index, row in df.query('grade > 0').iterrows(): print(f\"'{row['name']}',", "and the grades already owned.\"\"\" parser = argparse.ArgumentParser(name, description=description) parser.add_argument('mean',", "= solver.weight_average(sol) df = solver.df print(f\"Expected mean: {avg} -> {int(round(avg", "res.x def error_no_solution(): print(\"Mean not possible with current vote :(\")", "0 for i in range(0, len(self.owned_grades)): term1 = term1 +", "solver.weight_average(sol) df = solver.df print(f\"Expected mean: {avg} -> {int(round(avg /", "sol] else: sol = [round(x) for x in sol] output_result(solver,", "if sol is None: error_no_solution() if args.ceil: sol = [math.ceil(x)", "scipy.optimize.minimize(self.weight_average, self.grade_initial_sol, method='SLSQP', constraints=cons) if not res.success: return None return", "in v]) class WeightAverage: def __init__(self, mean, csv): self.df =", 
"pandas import scipy.optimize import sys import argparse def ineq_constraint_1(v): return", "self.grade_initial_sol, method='SLSQP', constraints=cons) if not res.success: return None return res.x", "self.owned_grades = self.df[['grade']].query('grade > 0').transpose().to_numpy()[0] self.tot_credits = sum(self.owned_credits) + sum(self.credits)", "grades already owned.\"\"\" parser = argparse.ArgumentParser(name, description=description) parser.add_argument('mean', metavar='M', type=float,", "Possible with current grades :(\") exit() for index, row in", "= 0 for i in range(0, len(self.owned_grades)): term1 = term1", "print(f\"'{row['name']}', credits: {row['credits']}, grade {int(sol[i])}\") i += 1 return 0", "scipy.optimize import sys import argparse def ineq_constraint_1(v): return np.array([vi for", "len(self.credits))]) self.owned_credits = self.df[['credits', 'grade']].query('grade > 0')[['credits']].transpose().to_numpy()[0] self.owned_grades = self.df[['grade']].query('grade", "> 0').transpose().to_numpy()[0] self.tot_credits = sum(self.owned_credits) + sum(self.credits) def weight_average(self, v):", "def weight_average(self, v): term1 = 0 term2 = 0 for", "= sum(self.owned_credits) + sum(self.credits) def weight_average(self, v): term1 = 0", "-> {int(round(avg / 30 * 110, 0))} / 110\") if", "if args.ceil: sol = [math.ceil(x) for x in sol] elif", "sol] output_result(solver, sol) return 0 if __name__ == '__main__': main()", "= [round(x) for x in sol] output_result(solver, sol) return 0", "'grade']].query('grade > 0')[['credits']].transpose().to_numpy()[0] self.owned_grades = self.df[['grade']].query('grade > 0').transpose().to_numpy()[0] self.tot_credits =", "= 0 term2 = 0 for i in range(0, len(self.owned_grades)):", "\"calcGrades\" description = \"\"\"CalcGrades is an utility which purpose is", "[math.floor(x) for x in sol] else: sol = [round(x) for", "in range(0, len(self.owned_grades)): term1 = term1 + self.owned_grades[i] * self.owned_credits[i]", "of the 
grades over the credits, given the desired output", "( {'type': 'eq', 'fun': self.eq_constraint}, {'type': 'ineq', 'fun': ineq_constraint_1}, {'type':", "'fun': self.eq_constraint}, {'type': 'ineq', 'fun': ineq_constraint_1}, {'type': 'ineq', 'fun': ineq_constraint_2})", "def __init__(self, mean, csv): self.df = pandas.read_csv(csv) self.course = self.df['name']", "owned.\"\"\" parser = argparse.ArgumentParser(name, description=description) parser.add_argument('mean', metavar='M', type=float, nargs='+', help='The", "{'type': 'ineq', 'fun': ineq_constraint_1}, {'type': 'ineq', 'fun': ineq_constraint_2}) res =", "= WeightAverage(mean, courses) sol = solver.solve() if sol is None:", "csv file containing the courses (default: courses.csv)') parser.add_argument('--floor', default=False, action='store_true',", "30 for vi in v]) class WeightAverage: def __init__(self, mean,", "math import numpy as np import pandas import scipy.optimize import", "args.file solver = WeightAverage(mean, courses) sol = solver.solve() if sol", "v): return self.weight_average(v) - self.expected_mean def solve(self): cons = (", "mean self.credits = self.df[['credits', 'grade']].query('grade == 0')[['credits']].transpose().to_numpy()[0] self.grade_initial_sol = np.array([mean", ":(\") exit(0) def output_result(solver, sol): avg = solver.weight_average(sol) df =", "= parser.parse_args() mean = args.mean courses = args.file solver =", "current vote :(\") exit(0) def output_result(solver, sol): avg = solver.weight_average(sol)", "__init__(self, mean, csv): self.df = pandas.read_csv(csv) self.course = self.df['name'] self.expected_mean", "self.tot_credits def eq_constraint(self, v): return self.weight_average(v) - self.expected_mean def solve(self):", "for x in sol] output_result(solver, sol) return 0 if __name__", "(term1 + term2) / self.tot_credits def eq_constraint(self, v): return self.weight_average(v)", "def solve(self): cons = ( {'type': 'eq', 'fun': self.eq_constraint}, {'type':", "'fun': 
ineq_constraint_1}, {'type': 'ineq', 'fun': ineq_constraint_2}) res = scipy.optimize.minimize(self.weight_average, self.grade_initial_sol,", "args.ceil: sol = [math.ceil(x) for x in sol] elif args.floor:", "+ 30 for vi in v]) class WeightAverage: def __init__(self,", "mean: {avg} -> {int(round(avg / 30 * 110, 0))} /", "constraints=cons) if not res.success: return None return res.x def error_no_solution():", "for x in sol] else: sol = [round(x) for x", "ineq_constraint_2(v): return np.array([-vi + 30 for vi in v]) class", "0 def main(): name = \"calcGrades\" description = \"\"\"CalcGrades is", "solver.df print(f\"Expected mean: {avg} -> {int(round(avg / 30 * 110,", "_ in range(0, len(self.credits))]) self.owned_credits = self.df[['credits', 'grade']].query('grade > 0')[['credits']].transpose().to_numpy()[0]", "is an utility which purpose is to compute the minimum", "with current vote :(\") exit(0) def output_result(solver, sol): avg =", "term1 = 0 term2 = 0 for i in range(0,", "{row['credits']}, grade {row['grade']}\") i = 0 for index, row in", "for _ in range(0, len(self.credits))]) self.owned_credits = self.df[['credits', 'grade']].query('grade >", "= solver.solve() if sol is None: error_no_solution() if args.ceil: sol", "in range(0, len(self.credits))]) self.owned_credits = self.df[['credits', 'grade']].query('grade > 0')[['credits']].transpose().to_numpy()[0] self.owned_grades", "= args.mean courses = args.file solver = WeightAverage(mean, courses) sol", "containing the courses (default: courses.csv)') parser.add_argument('--floor', default=False, action='store_true', help='apply floor", "self.expected_mean def solve(self): cons = ( {'type': 'eq', 'fun': self.eq_constraint},", "description = \"\"\"CalcGrades is an utility which purpose is to", "i = 0 for index, row in df.query('grade == 0').iterrows():", "df.query('grade == 0').iterrows(): print(f\"'{row['name']}', credits: {row['credits']}, grade {int(sol[i])}\") i +=", "range(0, len(v)): term2 = term2 + v[i] * 
self.credits[i] return", "* self.owned_credits[i] for i in range(0, len(v)): term2 = term2", "help='apply floor operation instead of round to solution') parser.add_argument('--ceil', default=False,", "in df.query('grade > 0').iterrows(): print(f\"'{row['name']}', credits: {row['credits']}, grade {row['grade']}\") i", "None: print(\"Not Possible with current grades :(\") exit() for index,", "args = parser.parse_args() mean = args.mean courses = args.file solver", "{'type': 'eq', 'fun': self.eq_constraint}, {'type': 'ineq', 'fun': ineq_constraint_1}, {'type': 'ineq',", "to the csv file containing the courses (default: courses.csv)') parser.add_argument('--floor',", "np.array([-vi + 30 for vi in v]) class WeightAverage: def", "{'type': 'ineq', 'fun': ineq_constraint_2}) res = scipy.optimize.minimize(self.weight_average, self.grade_initial_sol, method='SLSQP', constraints=cons)", "def error_no_solution(): print(\"Mean not possible with current vote :(\") exit(0)", "default=False, action='store_true', help='apply ceil operation instead of round to solution')", "vi in v]) class WeightAverage: def __init__(self, mean, csv): self.df", "parser.add_argument('--file',dest='file', default='courses.csv', type=str, help='path to the csv file containing the", "courses) sol = solver.solve() if sol is None: error_no_solution() if", "term2 = 0 for i in range(0, len(self.owned_grades)): term1 =", "None return res.x def error_no_solution(): print(\"Mean not possible with current", "def main(): name = \"calcGrades\" description = \"\"\"CalcGrades is an", "purpose is to compute the minimum grades required to get", "exit(0) def output_result(solver, sol): avg = solver.weight_average(sol) df = solver.df", "courses.csv)') parser.add_argument('--floor', default=False, action='store_true', help='apply floor operation instead of round", "0').iterrows(): print(f\"'{row['name']}', credits: {row['credits']}, grade {row['grade']}\") i = 0 for", "return np.array([-vi + 30 for vi in v]) class 
WeightAverage:", "output_result(solver, sol): avg = solver.weight_average(sol) df = solver.df print(f\"Expected mean:", "the grades over the credits, given the desired output and", "parser.add_argument('--ceil', default=False, action='store_true', help='apply ceil operation instead of round to", "= self.df['name'] self.expected_mean = mean self.credits = self.df[['credits', 'grade']].query('grade ==", "x in sol] else: sol = [round(x) for x in", "= np.array([mean for _ in range(0, len(self.credits))]) self.owned_credits = self.df[['credits',", "0')[['credits']].transpose().to_numpy()[0] self.grade_initial_sol = np.array([mean for _ in range(0, len(self.credits))]) self.owned_credits", "return 0 def main(): name = \"calcGrades\" description = \"\"\"CalcGrades", "self.df['name'] self.expected_mean = mean self.credits = self.df[['credits', 'grade']].query('grade == 0')[['credits']].transpose().to_numpy()[0]", "self.credits = self.df[['credits', 'grade']].query('grade == 0')[['credits']].transpose().to_numpy()[0] self.grade_initial_sol = np.array([mean for", "= scipy.optimize.minimize(self.weight_average, self.grade_initial_sol, method='SLSQP', constraints=cons) if not res.success: return None", "help='path to the csv file containing the courses (default: courses.csv)')", "res = scipy.optimize.minimize(self.weight_average, self.grade_initial_sol, method='SLSQP', constraints=cons) if not res.success: return", "credits: {row['credits']}, grade {row['grade']}\") i = 0 for index, row", "weight_average(self, v): term1 = 0 term2 = 0 for i", "row in df.query('grade == 0').iterrows(): print(f\"'{row['name']}', credits: {row['credits']}, grade {int(sol[i])}\")", "utility which purpose is to compute the minimum grades required", "the desired output and the grades already owned.\"\"\" parser =", "len(v)): term2 = term2 + v[i] * self.credits[i] return (term1", "* 110, 0))} / 110\") if sol is None: print(\"Not", "ceil operation instead of round to solution') args = 
parser.parse_args()", "/ self.tot_credits def eq_constraint(self, v): return self.weight_average(v) - self.expected_mean def", "solution') parser.add_argument('--ceil', default=False, action='store_true', help='apply ceil operation instead of round", "sys import argparse def ineq_constraint_1(v): return np.array([vi for vi in", "weight average of the grades over the credits, given the", "operation instead of round to solution') args = parser.parse_args() mean", "over the credits, given the desired output and the grades", "{row['credits']}, grade {int(sol[i])}\") i += 1 return 0 def main():", "/ 110\") if sol is None: print(\"Not Possible with current", "output and the grades already owned.\"\"\" parser = argparse.ArgumentParser(name, description=description)", "grade {int(sol[i])}\") i += 1 return 0 def main(): name", "ineq_constraint_2}) res = scipy.optimize.minimize(self.weight_average, self.grade_initial_sol, method='SLSQP', constraints=cons) if not res.success:", "import argparse def ineq_constraint_1(v): return np.array([vi for vi in v])", "/ 30 * 110, 0))} / 110\") if sol is", "help='apply ceil operation instead of round to solution') args =", "expected mean') parser.add_argument('--file',dest='file', default='courses.csv', type=str, help='path to the csv file", "in df.query('grade == 0').iterrows(): print(f\"'{row['name']}', credits: {row['credits']}, grade {int(sol[i])}\") i", "to solution') args = parser.parse_args() mean = args.mean courses =", "default=False, action='store_true', help='apply floor operation instead of round to solution')", "0))} / 110\") if sol is None: print(\"Not Possible with", "= [math.ceil(x) for x in sol] elif args.floor: sol =", "type=str, help='path to the csv file containing the courses (default:", "the credits, given the desired output and the grades already", "sol is None: print(\"Not Possible with current grades :(\") exit()", "help='The expected mean') parser.add_argument('--file',dest='file', default='courses.csv', 
type=str, help='path to the csv", "which purpose is to compute the minimum grades required to", "term2 + v[i] * self.credits[i] return (term1 + term2) /", "df = solver.df print(f\"Expected mean: {avg} -> {int(round(avg / 30", "file containing the courses (default: courses.csv)') parser.add_argument('--floor', default=False, action='store_true', help='apply", "eq_constraint(self, v): return self.weight_average(v) - self.expected_mean def solve(self): cons =", "v]) def ineq_constraint_2(v): return np.array([-vi + 30 for vi in", "+ term2) / self.tot_credits def eq_constraint(self, v): return self.weight_average(v) -", "credits: {row['credits']}, grade {int(sol[i])}\") i += 1 return 0 def", "'ineq', 'fun': ineq_constraint_1}, {'type': 'ineq', 'fun': ineq_constraint_2}) res = scipy.optimize.minimize(self.weight_average,", "v): term1 = 0 term2 = 0 for i in", "= ( {'type': 'eq', 'fun': self.eq_constraint}, {'type': 'ineq', 'fun': ineq_constraint_1},", "= \"calcGrades\" description = \"\"\"CalcGrades is an utility which purpose", "term1 + self.owned_grades[i] * self.owned_credits[i] for i in range(0, len(v)):", "return (term1 + term2) / self.tot_credits def eq_constraint(self, v): return", "csv): self.df = pandas.read_csv(csv) self.course = self.df['name'] self.expected_mean = mean", "the grades already owned.\"\"\" parser = argparse.ArgumentParser(name, description=description) parser.add_argument('mean', metavar='M',", "self.weight_average(v) - self.expected_mean def solve(self): cons = ( {'type': 'eq',", "WeightAverage(mean, courses) sol = solver.solve() if sol is None: error_no_solution()", "parser.add_argument('--floor', default=False, action='store_true', help='apply floor operation instead of round to", "vi in v]) def ineq_constraint_2(v): return np.array([-vi + 30 for", "exit() for index, row in df.query('grade > 0').iterrows(): print(f\"'{row['name']}', credits:", "operation instead of round to solution') parser.add_argument('--ceil', default=False, 
action='store_true', help='apply", "solve(self): cons = ( {'type': 'eq', 'fun': self.eq_constraint}, {'type': 'ineq',", "is None: error_no_solution() if args.ceil: sol = [math.ceil(x) for x", "possible with current vote :(\") exit(0) def output_result(solver, sol): avg", "= pandas.read_csv(csv) self.course = self.df['name'] self.expected_mean = mean self.credits =", "def eq_constraint(self, v): return self.weight_average(v) - self.expected_mean def solve(self): cons", "> 0')[['credits']].transpose().to_numpy()[0] self.owned_grades = self.df[['grade']].query('grade > 0').transpose().to_numpy()[0] self.tot_credits = sum(self.owned_credits)", "mean = args.mean courses = args.file solver = WeightAverage(mean, courses)", "for vi in v]) def ineq_constraint_2(v): return np.array([-vi + 30", "solver = WeightAverage(mean, courses) sol = solver.solve() if sol is", "self.expected_mean = mean self.credits = self.df[['credits', 'grade']].query('grade == 0')[['credits']].transpose().to_numpy()[0] self.grade_initial_sol", "== 0')[['credits']].transpose().to_numpy()[0] self.grade_initial_sol = np.array([mean for _ in range(0, len(self.credits))])", "\"\"\"CalcGrades is an utility which purpose is to compute the", "i += 1 return 0 def main(): name = \"calcGrades\"", "credits, given the desired output and the grades already owned.\"\"\"", "term2 = term2 + v[i] * self.credits[i] return (term1 +", "index, row in df.query('grade > 0').iterrows(): print(f\"'{row['name']}', credits: {row['credits']}, grade", "with current grades :(\") exit() for index, row in df.query('grade", "numpy as np import pandas import scipy.optimize import sys import", "range(0, len(self.owned_grades)): term1 = term1 + self.owned_grades[i] * self.owned_credits[i] for", "= mean self.credits = self.df[['credits', 'grade']].query('grade == 0')[['credits']].transpose().to_numpy()[0] self.grade_initial_sol =", "== 0').iterrows(): print(f\"'{row['name']}', credits: {row['credits']}, grade {int(sol[i])}\") i += 1", 
"print(\"Mean not possible with current vote :(\") exit(0) def output_result(solver,", "method='SLSQP', constraints=cons) if not res.success: return None return res.x def", "x in sol] output_result(solver, sol) return 0 if __name__ ==", "term1 = term1 + self.owned_grades[i] * self.owned_credits[i] for i in", "for index, row in df.query('grade > 0').iterrows(): print(f\"'{row['name']}', credits: {row['credits']},", "res.success: return None return res.x def error_no_solution(): print(\"Mean not possible", "[round(x) for x in sol] output_result(solver, sol) return 0 if", "x in sol] elif args.floor: sol = [math.floor(x) for x", "self.eq_constraint}, {'type': 'ineq', 'fun': ineq_constraint_1}, {'type': 'ineq', 'fun': ineq_constraint_2}) res", "sol): avg = solver.weight_average(sol) df = solver.df print(f\"Expected mean: {avg}", "ineq_constraint_1}, {'type': 'ineq', 'fun': ineq_constraint_2}) res = scipy.optimize.minimize(self.weight_average, self.grade_initial_sol, method='SLSQP',", "mean') parser.add_argument('--file',dest='file', default='courses.csv', type=str, help='path to the csv file containing", "def output_result(solver, sol): avg = solver.weight_average(sol) df = solver.df print(f\"Expected", "for i in range(0, len(v)): term2 = term2 + v[i]", "round to solution') args = parser.parse_args() mean = args.mean courses", "'grade']].query('grade == 0')[['credits']].transpose().to_numpy()[0] self.grade_initial_sol = np.array([mean for _ in range(0,", "floor operation instead of round to solution') parser.add_argument('--ceil', default=False, action='store_true',", "+ v[i] * self.credits[i] return (term1 + term2) / self.tot_credits", "WeightAverage: def __init__(self, mean, csv): self.df = pandas.read_csv(csv) self.course =", "self.df[['credits', 'grade']].query('grade == 0')[['credits']].transpose().to_numpy()[0] self.grade_initial_sol = np.array([mean for _ in", "for vi in v]) class WeightAverage: def __init__(self, mean, csv):", "metavar='M', type=float, 
nargs='+', help='The expected mean') parser.add_argument('--file',dest='file', default='courses.csv', type=str, help='path", "np.array([vi for vi in v]) def ineq_constraint_2(v): return np.array([-vi +", "* self.credits[i] return (term1 + term2) / self.tot_credits def eq_constraint(self,", "sol] elif args.floor: sol = [math.floor(x) for x in sol]", "return None return res.x def error_no_solution(): print(\"Mean not possible with", "in v]) def ineq_constraint_2(v): return np.array([-vi + 30 for vi", "110\") if sol is None: print(\"Not Possible with current grades", "avg = solver.weight_average(sol) df = solver.df print(f\"Expected mean: {avg} ->", "certain weight average of the grades over the credits, given", "courses = args.file solver = WeightAverage(mean, courses) sol = solver.solve()", "error_no_solution() if args.ceil: sol = [math.ceil(x) for x in sol]", "import numpy as np import pandas import scipy.optimize import sys", "desired output and the grades already owned.\"\"\" parser = argparse.ArgumentParser(name,", "= argparse.ArgumentParser(name, description=description) parser.add_argument('mean', metavar='M', type=float, nargs='+', help='The expected mean')", "to compute the minimum grades required to get a certain", "parser.add_argument('mean', metavar='M', type=float, nargs='+', help='The expected mean') parser.add_argument('--file',dest='file', default='courses.csv', type=str,", "self.credits[i] return (term1 + term2) / self.tot_credits def eq_constraint(self, v):", "courses (default: courses.csv)') parser.add_argument('--floor', default=False, action='store_true', help='apply floor operation instead", "self.owned_credits[i] for i in range(0, len(v)): term2 = term2 +", "vote :(\") exit(0) def output_result(solver, sol): avg = solver.weight_average(sol) df", "type=float, nargs='+', help='The expected mean') parser.add_argument('--file',dest='file', default='courses.csv', type=str, help='path to", "sol = [round(x) for x in sol] output_result(solver, sol) 
return", "self.owned_grades[i] * self.owned_credits[i] for i in range(0, len(v)): term2 =", "'eq', 'fun': self.eq_constraint}, {'type': 'ineq', 'fun': ineq_constraint_1}, {'type': 'ineq', 'fun':", "the csv file containing the courses (default: courses.csv)') parser.add_argument('--floor', default=False,", "index, row in df.query('grade == 0').iterrows(): print(f\"'{row['name']}', credits: {row['credits']}, grade", "term2) / self.tot_credits def eq_constraint(self, v): return self.weight_average(v) - self.expected_mean", "is to compute the minimum grades required to get a", "a certain weight average of the grades over the credits,", "self.course = self.df['name'] self.expected_mean = mean self.credits = self.df[['credits', 'grade']].query('grade", "None: error_no_solution() if args.ceil: sol = [math.ceil(x) for x in", "= args.file solver = WeightAverage(mean, courses) sol = solver.solve() if", "0').transpose().to_numpy()[0] self.tot_credits = sum(self.owned_credits) + sum(self.credits) def weight_average(self, v): term1", "= solver.df print(f\"Expected mean: {avg} -> {int(round(avg / 30 *", "parser = argparse.ArgumentParser(name, description=description) parser.add_argument('mean', metavar='M', type=float, nargs='+', help='The expected", "df.query('grade > 0').iterrows(): print(f\"'{row['name']}', credits: {row['credits']}, grade {row['grade']}\") i =", "required to get a certain weight average of the grades", "self.grade_initial_sol = np.array([mean for _ in range(0, len(self.credits))]) self.owned_credits =", "return res.x def error_no_solution(): print(\"Mean not possible with current vote", "argparse def ineq_constraint_1(v): return np.array([vi for vi in v]) def", "action='store_true', help='apply floor operation instead of round to solution') parser.add_argument('--ceil',", "solver.solve() if sol is None: error_no_solution() if args.ceil: sol =", "description=description) parser.add_argument('mean', metavar='M', type=float, nargs='+', help='The expected mean') 
parser.add_argument('--file',dest='file', default='courses.csv',", "{row['grade']}\") i = 0 for index, row in df.query('grade ==", "i in range(0, len(self.owned_grades)): term1 = term1 + self.owned_grades[i] *", "{avg} -> {int(round(avg / 30 * 110, 0))} / 110\")", "elif args.floor: sol = [math.floor(x) for x in sol] else:", "import math import numpy as np import pandas import scipy.optimize", "average of the grades over the credits, given the desired", "+= 1 return 0 def main(): name = \"calcGrades\" description", "to get a certain weight average of the grades over", "v[i] * self.credits[i] return (term1 + term2) / self.tot_credits def", "if not res.success: return None return res.x def error_no_solution(): print(\"Mean", "import sys import argparse def ineq_constraint_1(v): return np.array([vi for vi", "ineq_constraint_1(v): return np.array([vi for vi in v]) def ineq_constraint_2(v): return", "[math.ceil(x) for x in sol] elif args.floor: sol = [math.floor(x)", "for x in sol] elif args.floor: sol = [math.floor(x) for", "sol = [math.ceil(x) for x in sol] elif args.floor: sol", "return np.array([vi for vi in v]) def ineq_constraint_2(v): return np.array([-vi", "sum(self.owned_credits) + sum(self.credits) def weight_average(self, v): term1 = 0 term2", "args.mean courses = args.file solver = WeightAverage(mean, courses) sol =", "= [math.floor(x) for x in sol] else: sol = [round(x)", "0').iterrows(): print(f\"'{row['name']}', credits: {row['credits']}, grade {int(sol[i])}\") i += 1 return", "np import pandas import scipy.optimize import sys import argparse def", "{int(round(avg / 30 * 110, 0))} / 110\") if sol", "- self.expected_mean def solve(self): cons = ( {'type': 'eq', 'fun':", "def ineq_constraint_2(v): return np.array([-vi + 30 for vi in v])", "round to solution') parser.add_argument('--ceil', default=False, action='store_true', help='apply ceil operation instead", "1 return 0 def main(): name = \"calcGrades\" description =", "in range(0, len(v)): term2 = term2 + 
v[i] * self.credits[i]", "an utility which purpose is to compute the minimum grades", "= self.df[['credits', 'grade']].query('grade > 0')[['credits']].transpose().to_numpy()[0] self.owned_grades = self.df[['grade']].query('grade > 0').transpose().to_numpy()[0]", "import scipy.optimize import sys import argparse def ineq_constraint_1(v): return np.array([vi", "for index, row in df.query('grade == 0').iterrows(): print(f\"'{row['name']}', credits: {row['credits']},", "sol = [math.floor(x) for x in sol] else: sol =", "(default: courses.csv)') parser.add_argument('--floor', default=False, action='store_true', help='apply floor operation instead of", "i in range(0, len(v)): term2 = term2 + v[i] *", "if sol is None: print(\"Not Possible with current grades :(\")", "row in df.query('grade > 0').iterrows(): print(f\"'{row['name']}', credits: {row['credits']}, grade {row['grade']}\")", "= self.df[['credits', 'grade']].query('grade == 0')[['credits']].transpose().to_numpy()[0] self.grade_initial_sol = np.array([mean for _", "not possible with current vote :(\") exit(0) def output_result(solver, sol):", "the minimum grades required to get a certain weight average", "def ineq_constraint_1(v): return np.array([vi for vi in v]) def ineq_constraint_2(v):", "= term2 + v[i] * self.credits[i] return (term1 + term2)", "instead of round to solution') args = parser.parse_args() mean =", "30 * 110, 0))} / 110\") if sol is None:", "sum(self.credits) def weight_average(self, v): term1 = 0 term2 = 0", "+ sum(self.credits) def weight_average(self, v): term1 = 0 term2 =", "error_no_solution(): print(\"Mean not possible with current vote :(\") exit(0) def", "+ self.owned_grades[i] * self.owned_credits[i] for i in range(0, len(v)): term2", "> 0').iterrows(): print(f\"'{row['name']}', credits: {row['credits']}, grade {row['grade']}\") i = 0", "= 0 for index, row in df.query('grade == 0').iterrows(): print(f\"'{row['name']}',", "parser.parse_args() mean = args.mean courses = args.file solver = 
WeightAverage(mean,", "grades over the credits, given the desired output and the", "grades required to get a certain weight average of the", "of round to solution') args = parser.parse_args() mean = args.mean", "0')[['credits']].transpose().to_numpy()[0] self.owned_grades = self.df[['grade']].query('grade > 0').transpose().to_numpy()[0] self.tot_credits = sum(self.owned_credits) +", "in sol] output_result(solver, sol) return 0 if __name__ == '__main__':", "compute the minimum grades required to get a certain weight", "= \"\"\"CalcGrades is an utility which purpose is to compute", "grade {row['grade']}\") i = 0 for index, row in df.query('grade", "grades :(\") exit() for index, row in df.query('grade > 0').iterrows():", "else: sol = [round(x) for x in sol] output_result(solver, sol)", "is None: print(\"Not Possible with current grades :(\") exit() for" ]
[ "if environment_matrix is None and not opts.urn: raise TypeError(\"Missing required", "pulumi.Input[bool] fail_fast: If true, only a single attempt at most", "pulumi.Input[pulumi.InputType['ResultStorageArgs']] result_storage: Where the results for the matrix are written.", "implies no reruns. \"\"\" return pulumi.get(self, \"flaky_test_attempts\") @property @pulumi.getter(name=\"invalidMatrixDetails\") def", "pulumi.get(self, \"test_specification\") @property @pulumi.getter def timestamp(self) -> pulumi.Output[str]: \"\"\" The", "be deleted from Pulumi state. :param str resource_name: The name", "-> pulumi.Output[str]: \"\"\" Describes why the matrix is considered invalid.", "\"test_specification\", value) @property @pulumi.getter(name=\"clientInfo\") def client_info(self) -> Optional[pulumi.Input['ClientInfoArgs']]: \"\"\" Information", "to get an existing resource') __props__ = TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] =", "more limited because of that expectation. :param pulumi.Input[int] flaky_test_attempts: The", "= ['TestMatrixArgs', 'TestMatrix'] @pulumi.input_type class TestMatrixArgs: def __init__(__self__, *, environment_matrix:", ":param pulumi.Input['EnvironmentMatrixArgs'] environment_matrix: The devices the tests are being executed", "options to be a ResourceOptions instance') if opts.version is None:", "execution/shard in the matrix. Flaky test attempts are not affected.", "raise TypeError('Expected resource options to be a ResourceOptions instance') if", "client_info(self) -> pulumi.Output['outputs.ClientInfoResponse']: \"\"\" Information about the client which invoked", "pulumi.Input[pulumi.InputType['ClientInfoArgs']] client_info: Information about the client which invoked the test.", "client_info if environment_matrix is None and not opts.urn: raise TypeError(\"Missing", "to run the test. 
:param pulumi.Input['ClientInfoArgs'] client_info: Information about the", "value) @property @pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self) -> Optional[pulumi.Input[int]]: \"\"\" The number", "Note - this resource's API doesn't support deletion. When deleted,", "for the matrix are written. \"\"\" return pulumi.get(self, \"result_storage\") @result_storage.setter", "The devices the tests are being executed on. \"\"\" return", "by the service. \"\"\" return pulumi.get(self, \"test_matrix_id\") @property @pulumi.getter(name=\"testSpecification\") def", "test. :param pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']] environment_matrix: The devices the tests are being", "each execution/shard in the matrix. Flaky test attempts are not", "project(self) -> Optional[pulumi.Input[str]]: \"\"\" The cloud project that owns the", "TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] = client_info if environment_matrix is None and not", "pulumi.Output['outputs.TestSpecificationResponse']: \"\"\" How to run the test. \"\"\" return pulumi.get(self,", "matrix. \"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"testExecutions\") def test_executions(self) ->", "- this resource's API doesn't support deletion. When deleted, the", "@overload def __init__(__self__, resource_name: str, args: TestMatrixArgs, opts: Optional[pulumi.ResourceOptions] =", "doesn't support deletion. 
When deleted, the resource will persist on", "environment_matrix is None and not opts.urn: raise TypeError(\"Missing required property", "pulumi.Input['EnvironmentMatrixArgs']: \"\"\" The devices the tests are being executed on.", "opts: Optional[pulumi.ResourceOptions] = None, client_info: Optional[pulumi.Input[pulumi.InputType['ClientInfoArgs']]] = None, environment_matrix: Optional[pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']]]", "greater for fail-fast matrices and support is more limited because", "property 'environment_matrix'\") __props__.__dict__[\"environment_matrix\"] = environment_matrix __props__.__dict__[\"fail_fast\"] = fail_fast __props__.__dict__[\"flaky_test_attempts\"] =", "resource options to be a ResourceOptions instance') if opts.version is", "are not affected. Normally, 2 or more attempts are made", "the resulting resource. :param pulumi.Input[str] id: The unique provider ID", "fail-fast matrices and support is more limited because of that", "- INVALID_ARGUMENT - if the request is malformed or if", "is currently not supported for this resource. Note - this", "None super(TestMatrix, __self__).__init__( 'google-native:testing/v1:TestMatrix', resource_name, __props__, opts) @staticmethod def get(resource_name:", "\"project\") @project.setter def project(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"project\", value) @property", "to run each execution/shard in the matrix. 
Flaky test attempts", "opts = _utilities.get_resource_args_opts(TestMatrixArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not", "None): \"\"\" The set of arguments for constructing a TestMatrix", "property 'result_storage'\") __props__.__dict__[\"result_storage\"] = result_storage if test_specification is None and", "@property @pulumi.getter(name=\"invalidMatrixDetails\") def invalid_matrix_details(self) -> pulumi.Output[str]: \"\"\" Describes why the", "is not None: pulumi.set(__self__, \"flaky_test_attempts\", flaky_test_attempts) if project is not", "Optional[pulumi.Input[pulumi.InputType['ResultStorageArgs']]] = None, test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None, __props__=None): if opts", "* from ._inputs import * __all__ = ['TestMatrixArgs', 'TestMatrix'] @pulumi.input_type", "INVALID state. \"\"\" return pulumi.get(self, \"invalid_matrix_details\") @property @pulumi.getter(name=\"outcomeSummary\") def outcome_summary(self)", "TestMatrix resource's state with the given name, id, and optional", "def test_executions(self) -> pulumi.Output[Sequence['outputs.TestExecutionResponse']]: \"\"\" The list of test executions", "__props__.__dict__[\"project\"] = project __props__.__dict__[\"request_id\"] = request_id if result_storage is None", "environment_matrix: pulumi.Input['EnvironmentMatrixArgs'], result_storage: pulumi.Input['ResultStorageArgs'], test_specification: pulumi.Input['TestSpecificationArgs'], client_info: Optional[pulumi.Input['ClientInfoArgs']] = None,", "deleted from Pulumi state. :param str resource_name: The name of", "def result_storage(self) -> pulumi.Input['ResultStorageArgs']: \"\"\" Where the results for the", "Optional[pulumi.Input[pulumi.InputType['ResultStorageArgs']]] = None, test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None, __props__=None): \"\"\" Creates", "to use to populate this resource's properties. 
:param pulumi.ResourceOptions opts:", "resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(TestMatrixArgs, pulumi.ResourceOptions, *args,", "return pulumi.get(self, \"project\") @property @pulumi.getter(name=\"resultStorage\") def result_storage(self) -> pulumi.Output['outputs.ResultStorageResponse']: \"\"\"", "\"\"\" return pulumi.get(self, \"project\") @property @pulumi.getter(name=\"resultStorage\") def result_storage(self) -> pulumi.Output['outputs.ResultStorageResponse']:", "is more limited because of that expectation. :param pulumi.Input[int] flaky_test_attempts:", "is detected. This feature is for latency sensitive workloads. The", "matrix. \"\"\" pulumi.set(__self__, \"environment_matrix\", environment_matrix) pulumi.set(__self__, \"result_storage\", result_storage) pulumi.set(__self__, \"test_specification\",", "from ... import _utilities from . import outputs from ._enums", "@property @pulumi.getter(name=\"resultStorage\") def result_storage(self) -> pulumi.Input['ResultStorageArgs']: \"\"\" Where the results", "the tests are being executed on. :param pulumi.Input['ResultStorageArgs'] result_storage: Where", "which invoked the test. \"\"\" return pulumi.get(self, \"client_info\") @client_info.setter def", "time this test matrix was initially created. \"\"\" return pulumi.get(self,", "Pulumi SDK Generator. 
*** # *** Do not edit by", "args: The arguments to use to populate this resource's properties.", "authorized to write to project - INVALID_ARGUMENT - if the", "@property @pulumi.getter(name=\"testExecutions\") def test_executions(self) -> pulumi.Output[Sequence['outputs.TestExecutionResponse']]: \"\"\" The list of", "resource will persist on Google Cloud even though it will", "__props__.__dict__[\"client_info\"] = client_info if environment_matrix is None and not opts.urn:", "\"\"\" return pulumi.get(self, \"test_matrix_id\") @property @pulumi.getter(name=\"testSpecification\") def test_specification(self) -> pulumi.Output['outputs.TestSpecificationResponse']:", "_utilities.get_version() if opts.id is None: if __props__ is not None:", "__init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(TestMatrixArgs, pulumi.ResourceOptions,", "project: The cloud project that owns the test matrix. \"\"\"", "to use at most 2000 devices in parallel. May return", "def project(self) -> pulumi.Output[str]: \"\"\" The cloud project that owns", "The list of test executions that the service creates for", "TestMatrix resource. :param pulumi.Input['EnvironmentMatrixArgs'] environment_matrix: The devices the tests are", "pulumi.set(self, \"client_info\", value) @property @pulumi.getter(name=\"failFast\") def fail_fast(self) -> Optional[pulumi.Input[bool]]: \"\"\"", "are being executed on. 
\"\"\" return pulumi.get(self, \"environment_matrix\") @environment_matrix.setter def", "= test_specification __props__.__dict__[\"invalid_matrix_details\"] = None __props__.__dict__[\"outcome_summary\"] = None __props__.__dict__[\"state\"] =", "None, request_id: Optional[pulumi.Input[str]] = None, result_storage: Optional[pulumi.Input[pulumi.InputType['ResultStorageArgs']]] = None, test_specification:", "__props__=__props__) @property @pulumi.getter(name=\"clientInfo\") def client_info(self) -> pulumi.Output['outputs.ClientInfoResponse']: \"\"\" Information about", "@pulumi.getter(name=\"testExecutions\") def test_executions(self) -> pulumi.Output[Sequence['outputs.TestExecutionResponse']]: \"\"\" The list of test", "pulumi.get(self, \"fail_fast\") @property @pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self) -> pulumi.Output[int]: \"\"\" The", "pulumi.Output[str]: \"\"\" Output Only. The overall outcome of the test.", "affected. Normally, 2 or more attempts are made if a", "pulumi.set(__self__, \"environment_matrix\", environment_matrix) pulumi.set(__self__, \"result_storage\", result_storage) pulumi.set(__self__, \"test_specification\", test_specification) if", "*** import warnings import pulumi import pulumi.runtime from typing import", "def fail_fast(self) -> pulumi.Output[bool]: \"\"\" If true, only a single", "\"flaky_test_attempts\") @property @pulumi.getter(name=\"invalidMatrixDetails\") def invalid_matrix_details(self) -> pulumi.Output[str]: \"\"\" Describes why", "the test. 
\"\"\" return pulumi.get(self, \"test_specification\") @test_specification.setter def test_specification(self, value:", "= None, test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None, __props__=None): \"\"\" Creates and", ":param pulumi.Input['ClientInfoArgs'] client_info: Information about the client which invoked the", "is None: if __props__ is not None: raise TypeError('__props__ is", "of test executions that the service creates for this matrix.", "tests are being executed on. :param pulumi.Input['ResultStorageArgs'] result_storage: Where the", "str resource_name: The name of the resource. :param TestMatrixArgs args:", "may be significantly greater for fail-fast matrices and support is", "and not opts.urn: raise TypeError(\"Missing required property 'test_specification'\") __props__.__dict__[\"test_specification\"] =", "value) @property @pulumi.getter(name=\"failFast\") def fail_fast(self) -> Optional[pulumi.Input[bool]]: \"\"\" If true,", "of the resource. :param TestMatrixArgs args: The arguments to use", "resulting resource. :param pulumi.Input[str] id: The unique provider ID of", "def timestamp(self) -> pulumi.Output[str]: \"\"\" The time this test matrix", "number of reruns allowed is 10. Default is 0, which", "is 0, which implies no reruns. :param pulumi.Input[str] project: The", "which implies no reruns. :param pulumi.Input[str] project: The cloud project", "implies no reruns. \"\"\" return pulumi.get(self, \"flaky_test_attempts\") @flaky_test_attempts.setter def flaky_test_attempts(self,", "test_specification: How to run the test. \"\"\" ... @overload def", "valid when passed in combination with a valid opts.id to", "'result_storage'\") __props__.__dict__[\"result_storage\"] = result_storage if test_specification is None and not", "Information about the client which invoked the test. :param pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']]", "more limited because of that expectation. 
\"\"\" return pulumi.get(self, \"fail_fast\")", "return pulumi.get(self, \"test_specification\") @test_specification.setter def test_specification(self, value: pulumi.Input['TestSpecificationArgs']): pulumi.set(self, \"test_specification\",", "a TestExecution should be re-attempted if one or more of", "test_specification(self, value: pulumi.Input['TestSpecificationArgs']): pulumi.set(self, \"test_specification\", value) @property @pulumi.getter(name=\"clientInfo\") def client_info(self)", "persist on Google Cloud even though it will be deleted", "\"test_matrix_id\") @property @pulumi.getter(name=\"testSpecification\") def test_specification(self) -> pulumi.Output['outputs.TestSpecificationResponse']: \"\"\" How to", "value) @property @pulumi.getter def project(self) -> Optional[pulumi.Input[str]]: \"\"\" The cloud", "be a ResourceOptions instance') if opts.version is None: opts.version =", "def client_info(self) -> Optional[pulumi.Input['ClientInfoArgs']]: \"\"\" Information about the client which", "None, test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None, __props__=None): if opts is None:", "*** WARNING: this file was generated by the Pulumi SDK", "resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource.", "None __props__.__dict__[\"test_matrix_id\"] = None __props__.__dict__[\"test_specification\"] = None __props__.__dict__[\"timestamp\"] = None", "The maximum number of reruns allowed is 10. Default is", "implies no reruns. :param pulumi.Input[str] project: The cloud project that", "not None: pulumi.set(__self__, \"project\", project) if request_id is not None:", "\"\"\" Indicates the current progress of the test matrix. 
\"\"\"", "value) class TestMatrix(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions]", "attempt at most will be made to run each execution/shard", "client_info(self) -> Optional[pulumi.Input['ClientInfoArgs']]: \"\"\" Information about the client which invoked", "executed on. \"\"\" return pulumi.get(self, \"environment_matrix\") @environment_matrix.setter def environment_matrix(self, value:", "name, id, and optional extra properties used to qualify the", "= None, __props__=None): \"\"\" Creates and runs a matrix of", "instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id", "pulumi.set(__self__, \"test_specification\", test_specification) if client_info is not None: pulumi.set(__self__, \"client_info\",", "the client which invoked the test. :param pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']] environment_matrix: The", "state UNSUPPORTED. A test matrix is limited to use at", "use too many simultaneous devices. Auto-naming is currently not supported", "arguments to use to populate this resource's properties. 
:param pulumi.ResourceOptions", "@pulumi.getter(name=\"failFast\") def fail_fast(self) -> Optional[pulumi.Input[bool]]: \"\"\" If true, only a", "not authorized to write to project - INVALID_ARGUMENT - if", "def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(TestMatrixArgs,", "@pulumi.getter def project(self) -> pulumi.Output[str]: \"\"\" The cloud project that", "request_id) @property @pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self) -> pulumi.Input['EnvironmentMatrixArgs']: \"\"\" The devices", "\"\"\" return pulumi.get(self, \"client_info\") @client_info.setter def client_info(self, value: Optional[pulumi.Input['ClientInfoArgs']]): pulumi.set(self,", "@pulumi.getter(name=\"requestId\") def request_id(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, \"request_id\") @request_id.setter def", "= None, fail_fast: Optional[pulumi.Input[bool]] = None, flaky_test_attempts: Optional[pulumi.Input[int]] = None,", "\"\"\" return pulumi.get(self, \"flaky_test_attempts\") @property @pulumi.getter(name=\"invalidMatrixDetails\") def invalid_matrix_details(self) -> pulumi.Output[str]:", "__props__.__dict__[\"result_storage\"] = result_storage if test_specification is None and not opts.urn:", "Optional[pulumi.Input[pulumi.InputType['ClientInfoArgs']]] = None, environment_matrix: Optional[pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']]] = None, fail_fast: Optional[pulumi.Input[bool]] =", "resource. Note - this resource's API doesn't support deletion. When", "latency sensitive workloads. 
The incidence of execution failures may be", "pulumi.Input['ResultStorageArgs'], test_specification: pulumi.Input['TestSpecificationArgs'], client_info: Optional[pulumi.Input['ClientInfoArgs']] = None, fail_fast: Optional[pulumi.Input[bool]] =", "- if the request is malformed or if the matrix", "__props__.__dict__[\"test_specification\"] = test_specification __props__.__dict__[\"invalid_matrix_details\"] = None __props__.__dict__[\"outcome_summary\"] = None __props__.__dict__[\"state\"]", "pulumi.Input['TestSpecificationArgs']): pulumi.set(self, \"test_specification\", value) @property @pulumi.getter(name=\"clientInfo\") def client_info(self) -> Optional[pulumi.Input['ClientInfoArgs']]:", "be significantly greater for fail-fast matrices and support is more", "pulumi.Output[str]: \"\"\" The cloud project that owns the test matrix.", "\"\"\" return pulumi.get(self, \"result_storage\") @property @pulumi.getter def state(self) -> pulumi.Output[str]:", "Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None, __props__=None): \"\"\" Creates and runs a matrix", "if result_storage is None and not opts.urn: raise TypeError(\"Missing required", "even though it will be deleted from Pulumi state. :param", "@pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self) -> pulumi.Input['EnvironmentMatrixArgs']: \"\"\" The devices the tests", "for fail-fast matrices and support is more limited because of", "when the test matrix state is FINISHED. \"\"\" return pulumi.get(self,", "matrix are written. 
:param pulumi.Input['TestSpecificationArgs'] test_specification: How to run the", "a single attempt at most will be made to run", "\"client_info\") @client_info.setter def client_info(self, value: Optional[pulumi.Input['ClientInfoArgs']]): pulumi.set(self, \"client_info\", value) @property", "\"\"\" return pulumi.get(self, \"fail_fast\") @fail_fast.setter def fail_fast(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self,", "pulumi.Input[pulumi.InputType['TestSpecificationArgs']] test_specification: How to run the test. \"\"\" ... @overload", "@property @pulumi.getter(name=\"clientInfo\") def client_info(self) -> pulumi.Output['outputs.ClientInfoResponse']: \"\"\" Information about the", "the test matrix. \"\"\" return pulumi.get(self, \"project\") @property @pulumi.getter(name=\"resultStorage\") def", "if request_id is not None: pulumi.set(__self__, \"request_id\", request_id) @property @pulumi.getter(name=\"environmentMatrix\")", "class TestMatrix(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] =", "executed on. :param pulumi.Input[bool] fail_fast: If true, only a single", "you're certain you know what you are doing! *** import", "required property 'result_storage'\") __props__.__dict__[\"result_storage\"] = result_storage if test_specification is None", "coding=utf-8 # *** WARNING: this file was generated by the", "Output Only. The overall outcome of the test. Only set", "return TestMatrix(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name=\"clientInfo\") def client_info(self) -> pulumi.Output['outputs.ClientInfoResponse']:", "test matrix. 
\"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"testExecutions\") def test_executions(self)", "raise TypeError(\"Missing required property 'environment_matrix'\") __props__.__dict__[\"environment_matrix\"] = environment_matrix __props__.__dict__[\"fail_fast\"] =", "file was generated by the Pulumi SDK Generator. *** #", "of the following canonical error codes: - PERMISSION_DENIED - if", "Optional[pulumi.Input[int]]: \"\"\" The number of times a TestExecution should be", "resource. :param pulumi.Input[pulumi.InputType['ClientInfoArgs']] client_info: Information about the client which invoked", "and runs a matrix of tests according to the given", "combination with a valid opts.id to get an existing resource')", "\"client_info\") @property @pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self) -> pulumi.Output['outputs.EnvironmentMatrixResponse']: \"\"\" The devices", "if opts.version is None: opts.version = _utilities.get_version() if opts.id is", "in the matrix. Flaky test attempts are not affected. Normally,", "being executed on. \"\"\" return pulumi.get(self, \"environment_matrix\") @property @pulumi.getter(name=\"failFast\") def", "str, opts: Optional[pulumi.ResourceOptions] = None, client_info: Optional[pulumi.Input[pulumi.InputType['ClientInfoArgs']]] = None, environment_matrix:", "def state(self) -> pulumi.Output[str]: \"\"\" Indicates the current progress of", "expectation. 
:param pulumi.Input[int] flaky_test_attempts: The number of times a TestExecution", "not opts.urn: raise TypeError(\"Missing required property 'result_storage'\") __props__.__dict__[\"result_storage\"] = result_storage", "pulumi.Input['EnvironmentMatrixArgs'] environment_matrix: The devices the tests are being executed on.", "\"client_info\", client_info) if fail_fast is not None: pulumi.set(__self__, \"fail_fast\", fail_fast)", "fail_fast(self) -> Optional[pulumi.Input[bool]]: \"\"\" If true, only a single attempt", "__props__.__dict__[\"environment_matrix\"] = None __props__.__dict__[\"fail_fast\"] = None __props__.__dict__[\"flaky_test_attempts\"] = None __props__.__dict__[\"invalid_matrix_details\"]", "test matrix. \"\"\" pulumi.set(__self__, \"environment_matrix\", environment_matrix) pulumi.set(__self__, \"result_storage\", result_storage) pulumi.set(__self__,", "= None __props__.__dict__[\"project\"] = None __props__.__dict__[\"result_storage\"] = None __props__.__dict__[\"state\"] =", "matrix. Flaky test attempts are not affected. Normally, 2 or", "= None __props__.__dict__[\"flaky_test_attempts\"] = None __props__.__dict__[\"invalid_matrix_details\"] = None __props__.__dict__[\"outcome_summary\"] =", "None __props__.__dict__[\"result_storage\"] = None __props__.__dict__[\"state\"] = None __props__.__dict__[\"test_executions\"] = None", "re-attempted if one or more of its test cases fail", "-> pulumi.Input['ResultStorageArgs']: \"\"\" Where the results for the matrix are", "pulumi.Input['TestSpecificationArgs'], client_info: Optional[pulumi.Input['ClientInfoArgs']] = None, fail_fast: Optional[pulumi.Input[bool]] = None, flaky_test_attempts:", "supported for this resource. Note - this resource's API doesn't", "sensitive workloads. The incidence of execution failures may be significantly", "\"\"\" Information about the client which invoked the test. 
\"\"\"", "passed in combination with a valid opts.id to get an", "__props__.__dict__[\"client_info\"] = None __props__.__dict__[\"environment_matrix\"] = None __props__.__dict__[\"fail_fast\"] = None __props__.__dict__[\"flaky_test_attempts\"]", "@pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self) -> Optional[pulumi.Input[int]]: \"\"\" The number of times", "__props__.__dict__[\"request_id\"] = request_id if result_storage is None and not opts.urn:", "return pulumi.get(self, \"request_id\") @request_id.setter def request_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"request_id\",", "def outcome_summary(self) -> pulumi.Output[str]: \"\"\" Output Only. The overall outcome", "-> pulumi.Input['TestSpecificationArgs']: \"\"\" How to run the test. \"\"\" return", "How to run the test. \"\"\" ... @overload def __init__(__self__,", "matrix tries to use too many simultaneous devices. Auto-naming is", "@pulumi.getter(name=\"testSpecification\") def test_specification(self) -> pulumi.Output['outputs.TestSpecificationResponse']: \"\"\" How to run the", "reruns. \"\"\" return pulumi.get(self, \"flaky_test_attempts\") @flaky_test_attempts.setter def flaky_test_attempts(self, value: Optional[pulumi.Input[int]]):", "existing resource') __props__ = TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] = client_info if environment_matrix", "pulumi.set(__self__, \"client_info\", client_info) if fail_fast is not None: pulumi.set(__self__, \"fail_fast\",", "the user is not authorized to write to project -", "@client_info.setter def client_info(self, value: Optional[pulumi.Input['ClientInfoArgs']]): pulumi.set(self, \"client_info\", value) @property @pulumi.getter(name=\"failFast\")", "properties used to qualify the lookup. 
:param str resource_name: The", "project: Optional[pulumi.Input[str]] = None, request_id: Optional[pulumi.Input[str]] = None, result_storage: Optional[pulumi.Input[pulumi.InputType['ResultStorageArgs']]]", "with the given name, id, and optional extra properties used", "reruns. :param pulumi.Input[str] project: The cloud project that owns the", "test matrix state is FINISHED. \"\"\" return pulumi.get(self, \"outcome_summary\") @property", "at most 2000 devices in parallel. May return any of", "to be a ResourceOptions instance') if opts.version is None: opts.version", "state with the given name, id, and optional extra properties", "name of the resulting resource. :param pulumi.Input[str] id: The unique", "is FINISHED. \"\"\" return pulumi.get(self, \"outcome_summary\") @property @pulumi.getter def project(self)", "and optional extra properties used to qualify the lookup. :param", "__props__.__dict__[\"environment_matrix\"] = environment_matrix __props__.__dict__[\"fail_fast\"] = fail_fast __props__.__dict__[\"flaky_test_attempts\"] = flaky_test_attempts __props__.__dict__[\"project\"]", "@property @pulumi.getter(name=\"failFast\") def fail_fast(self) -> pulumi.Output[bool]: \"\"\" If true, only", "\"\"\" return pulumi.get(self, \"project\") @project.setter def project(self, value: Optional[pulumi.Input[str]]): pulumi.set(self,", "made to run each execution/shard in the matrix. Flaky test", "result_storage(self) -> pulumi.Output['outputs.ResultStorageResponse']: \"\"\" Where the results for the matrix", "\"\"\" Get an existing TestMatrix resource's state with the given", "are written. \"\"\" return pulumi.get(self, \"result_storage\") @result_storage.setter def result_storage(self, value:", "Optional[pulumi.ResourceOptions] = None, client_info: Optional[pulumi.Input[pulumi.InputType['ClientInfoArgs']]] = None, environment_matrix: Optional[pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']]] =", "of the test. 
Only set when the test matrix state", "* __all__ = ['TestMatrixArgs', 'TestMatrix'] @pulumi.input_type class TestMatrixArgs: def __init__(__self__,", "pulumi.get(self, \"flaky_test_attempts\") @flaky_test_attempts.setter def flaky_test_attempts(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, \"flaky_test_attempts\", value)", "@fail_fast.setter def fail_fast(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, \"fail_fast\", value) @property @pulumi.getter(name=\"flakyTestAttempts\")", "to run the test. \"\"\" return pulumi.get(self, \"test_specification\") @test_specification.setter def", "pulumi.set(self, \"request_id\", value) class TestMatrix(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str,", "@property @pulumi.getter(name=\"outcomeSummary\") def outcome_summary(self) -> pulumi.Output[str]: \"\"\" Output Only. The", "the resource. :param pulumi.Input[pulumi.InputType['ClientInfoArgs']] client_info: Information about the client which", "the INVALID state. \"\"\" return pulumi.get(self, \"invalid_matrix_details\") @property @pulumi.getter(name=\"outcomeSummary\") def", "= environment_matrix __props__.__dict__[\"fail_fast\"] = fail_fast __props__.__dict__[\"flaky_test_attempts\"] = flaky_test_attempts __props__.__dict__[\"project\"] =", "and support is more limited because of that expectation. \"\"\"", "Optional[pulumi.Input[str]]): pulumi.set(self, \"project\", value) @property @pulumi.getter(name=\"requestId\") def request_id(self) -> Optional[pulumi.Input[str]]:", "def result_storage(self) -> pulumi.Output['outputs.ResultStorageResponse']: \"\"\" Where the results for the", "edit by hand unless you're certain you know what you", "being executed on. :param pulumi.Input[bool] fail_fast: If true, only a", "\"fail_fast\") @fail_fast.setter def fail_fast(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, \"fail_fast\", value) @property", "The set of arguments for constructing a TestMatrix resource. 
:param", "written. :param pulumi.Input['TestSpecificationArgs'] test_specification: How to run the test. :param", "pulumi.set(self, \"fail_fast\", value) @property @pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self) -> Optional[pulumi.Input[int]]: \"\"\"", "pulumi.get(self, \"invalid_matrix_details\") @property @pulumi.getter(name=\"outcomeSummary\") def outcome_summary(self) -> pulumi.Output[str]: \"\"\" Output", "the test. :param pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']] environment_matrix: The devices the tests are", "\"\"\" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] =", "return pulumi.get(self, \"flaky_test_attempts\") @flaky_test_attempts.setter def flaky_test_attempts(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, \"flaky_test_attempts\",", "typing import Any, Mapping, Optional, Sequence, Union, overload from ...", "**kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else:", "value: Optional[pulumi.Input[str]]): pulumi.set(self, \"project\", value) @property @pulumi.getter(name=\"requestId\") def request_id(self) ->", "for the resource. :param pulumi.Input[pulumi.InputType['ClientInfoArgs']] client_info: Information about the client", "def invalid_matrix_details(self) -> pulumi.Output[str]: \"\"\" Describes why the matrix is", "def request_id(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, \"request_id\") @request_id.setter def request_id(self,", "How to run the test. \"\"\" return pulumi.get(self, \"test_specification\") @property", "the test. 
\"\"\" return pulumi.get(self, \"test_specification\") @property @pulumi.getter def timestamp(self)", "client_info: Optional[pulumi.Input[pulumi.InputType['ClientInfoArgs']]] = None, environment_matrix: Optional[pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']]] = None, fail_fast: Optional[pulumi.Input[bool]]", "resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options", "test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None, __props__=None): if opts is None: opts", "following canonical error codes: - PERMISSION_DENIED - if the user", "matrix are written. \"\"\" return pulumi.get(self, \"result_storage\") @property @pulumi.getter def", ":param pulumi.ResourceOptions opts: Options for the resource. \"\"\" opts =", "\"\"\" return pulumi.get(self, \"test_specification\") @test_specification.setter def test_specification(self, value: pulumi.Input['TestSpecificationArgs']): pulumi.set(self,", "None __props__.__dict__[\"environment_matrix\"] = None __props__.__dict__[\"fail_fast\"] = None __props__.__dict__[\"flaky_test_attempts\"] = None", "the results for the matrix are written. :param pulumi.Input[pulumi.InputType['TestSpecificationArgs']] test_specification:", "= _utilities.get_version() if opts.id is None: if __props__ is not", "None: if __props__ is not None: raise TypeError('__props__ is only", "client which invoked the test. :param pulumi.Input[bool] fail_fast: If true,", "the current progress of the test matrix. \"\"\" return pulumi.get(self,", "run the test. :param pulumi.Input['ClientInfoArgs'] client_info: Information about the client", "tests are being executed on. 
\"\"\" return pulumi.get(self, \"environment_matrix\") @property", "result_storage if test_specification is None and not opts.urn: raise TypeError(\"Missing", "-> 'TestMatrix': \"\"\" Get an existing TestMatrix resource's state with", "@pulumi.getter(name=\"outcomeSummary\") def outcome_summary(self) -> pulumi.Output[str]: \"\"\" Output Only. The overall", "\"\"\" return pulumi.get(self, \"result_storage\") @result_storage.setter def result_storage(self, value: pulumi.Input['ResultStorageArgs']): pulumi.set(self,", "Where the results for the matrix are written. :param pulumi.Input[pulumi.InputType['TestSpecificationArgs']]", "Get an existing TestMatrix resource's state with the given name,", "flaky_test_attempts(self) -> Optional[pulumi.Input[int]]: \"\"\" The number of times a TestExecution", "pulumi.Output[int]: \"\"\" The number of times a TestExecution should be", "TypeError(\"Missing required property 'result_storage'\") __props__.__dict__[\"result_storage\"] = result_storage if test_specification is", "value: Optional[pulumi.Input[int]]): pulumi.set(self, \"flaky_test_attempts\", value) @property @pulumi.getter def project(self) ->", "__props__.__dict__[\"flaky_test_attempts\"] = None __props__.__dict__[\"invalid_matrix_details\"] = None __props__.__dict__[\"outcome_summary\"] = None __props__.__dict__[\"project\"]", "-> pulumi.Output[Sequence['outputs.TestExecutionResponse']]: \"\"\" The list of test executions that the", "being executed on. \"\"\" return pulumi.get(self, \"environment_matrix\") @environment_matrix.setter def environment_matrix(self,", "@property @pulumi.getter def timestamp(self) -> pulumi.Output[str]: \"\"\" The time this", "None: raise TypeError('__props__ is only valid when passed in combination", "is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs)", "resource. \"\"\" ... 
def __init__(__self__, resource_name: str, *args, **kwargs): resource_args,", "None: opts.version = _utilities.get_version() if opts.id is None: if __props__", ":param str resource_name: The name of the resource. :param pulumi.ResourceOptions", "**kwargs): resource_args, opts = _utilities.get_resource_args_opts(TestMatrixArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args", "the test. \"\"\" ... @overload def __init__(__self__, resource_name: str, args:", "= None, project: Optional[pulumi.Input[str]] = None, request_id: Optional[pulumi.Input[str]] = None):", "is more limited because of that expectation. \"\"\" return pulumi.get(self,", "= pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options", "of the resource. :param pulumi.ResourceOptions opts: Options for the resource.", "Indicates the current progress of the test matrix. \"\"\" return", "__props__=None): if opts is None: opts = pulumi.ResourceOptions() if not", "cloud project that owns the test matrix. \"\"\" pulumi.set(__self__, \"environment_matrix\",", "@pulumi.getter(name=\"clientInfo\") def client_info(self) -> pulumi.Output['outputs.ClientInfoResponse']: \"\"\" Information about the client", "pulumi.Output[str]: \"\"\" The time this test matrix was initially created.", "to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for", "this file was generated by the Pulumi SDK Generator. ***", "result_storage is None and not opts.urn: raise TypeError(\"Missing required property", "get an existing resource') __props__ = TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] = client_info", "... 
@overload def __init__(__self__, resource_name: str, args: TestMatrixArgs, opts: Optional[pulumi.ResourceOptions]", "pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence,", "'TestMatrix': \"\"\" Get an existing TestMatrix resource's state with the", "'test_specification'\") __props__.__dict__[\"test_specification\"] = test_specification __props__.__dict__[\"invalid_matrix_details\"] = None __props__.__dict__[\"outcome_summary\"] = None", "pulumi.get(self, \"result_storage\") @property @pulumi.getter def state(self) -> pulumi.Output[str]: \"\"\" Indicates", "None: pulumi.set(__self__, \"flaky_test_attempts\", flaky_test_attempts) if project is not None: pulumi.set(__self__,", "Optional[pulumi.Input[bool]]): pulumi.set(self, \"fail_fast\", value) @property @pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self) -> Optional[pulumi.Input[int]]:", "None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if", "for this resource. Note - this resource's API doesn't support", "list of test executions that the service creates for this", "None and not opts.urn: raise TypeError(\"Missing required property 'environment_matrix'\") __props__.__dict__[\"environment_matrix\"]", "is not None: pulumi.set(__self__, \"fail_fast\", fail_fast) if flaky_test_attempts is not", "flaky_test_attempts(self) -> pulumi.Output[int]: \"\"\" The number of times a TestExecution", "if __props__ is not None: raise TypeError('__props__ is only valid", "-> pulumi.Input['EnvironmentMatrixArgs']: \"\"\" The devices the tests are being executed", "the test. :param pulumi.Input[bool] fail_fast: If true, only a single", "fail_fast: Optional[pulumi.Input[bool]] = None, flaky_test_attempts: Optional[pulumi.Input[int]] = None, project: Optional[pulumi.Input[str]]", "this test matrix was initially created. 
\"\"\" return pulumi.get(self, \"timestamp\")", "Google Cloud even though it will be deleted from Pulumi", "The devices the tests are being executed on. :param pulumi.Input[bool]", "environment_matrix) pulumi.set(__self__, \"result_storage\", result_storage) pulumi.set(__self__, \"test_specification\", test_specification) if client_info is", "def environment_matrix(self) -> pulumi.Output['outputs.EnvironmentMatrixResponse']: \"\"\" The devices the tests are", "-> pulumi.Output[str]: \"\"\" Output Only. The overall outcome of the", "will be deleted from Pulumi state. :param str resource_name: The", "pulumi.Output['outputs.EnvironmentMatrixResponse']: \"\"\" The devices the tests are being executed on.", "None, fail_fast: Optional[pulumi.Input[bool]] = None, flaky_test_attempts: Optional[pulumi.Input[int]] = None, project:", "None, request_id: Optional[pulumi.Input[str]] = None): \"\"\" The set of arguments", "__props__ = TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] = None __props__.__dict__[\"environment_matrix\"] = None __props__.__dict__[\"fail_fast\"]", "tries to use too many simultaneous devices. Auto-naming is currently", "in parallel. May return any of the following canonical error", "matrix is limited to use at most 2000 devices in", "test_executions(self) -> pulumi.Output[Sequence['outputs.TestExecutionResponse']]: \"\"\" The list of test executions that", "on. \"\"\" return pulumi.get(self, \"environment_matrix\") @environment_matrix.setter def environment_matrix(self, value: pulumi.Input['EnvironmentMatrixArgs']):", "test. 
\"\"\" return pulumi.get(self, \"client_info\") @property @pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self) ->", "__props__.__dict__[\"invalid_matrix_details\"] = None __props__.__dict__[\"outcome_summary\"] = None __props__.__dict__[\"project\"] = None __props__.__dict__[\"result_storage\"]", "\"invalid_matrix_details\") @property @pulumi.getter(name=\"outcomeSummary\") def outcome_summary(self) -> pulumi.Output[str]: \"\"\" Output Only.", "the following canonical error codes: - PERMISSION_DENIED - if the", "**resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts:", "return pulumi.get(self, \"test_specification\") @property @pulumi.getter def timestamp(self) -> pulumi.Output[str]: \"\"\"", "._inputs import * __all__ = ['TestMatrixArgs', 'TestMatrix'] @pulumi.input_type class TestMatrixArgs:", "client which invoked the test. \"\"\" return pulumi.get(self, \"client_info\") @property", "def environment_matrix(self) -> pulumi.Input['EnvironmentMatrixArgs']: \"\"\" The devices the tests are", "incidence of execution failures may be significantly greater for fail-fast", "on. \"\"\" return pulumi.get(self, \"environment_matrix\") @property @pulumi.getter(name=\"failFast\") def fail_fast(self) ->", "Optional[pulumi.Input['ClientInfoArgs']] = None, fail_fast: Optional[pulumi.Input[bool]] = None, flaky_test_attempts: Optional[pulumi.Input[int]] =", "not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a", "support is more limited because of that expectation. \"\"\" return", "opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str,", "\"\"\" The cloud project that owns the test matrix. \"\"\"", "Any, Mapping, Optional, Sequence, Union, overload from ... import _utilities", "opts: Options for the resource. \"\"\" ... 
def __init__(__self__, resource_name:", "the service. \"\"\" return pulumi.get(self, \"test_matrix_id\") @property @pulumi.getter(name=\"testSpecification\") def test_specification(self)", "import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union,", "significantly greater for fail-fast matrices and support is more limited", "state. :param str resource_name: The name of the resource. :param", "@pulumi.getter(name=\"testSpecification\") def test_specification(self) -> pulumi.Input['TestSpecificationArgs']: \"\"\" How to run the", "of that expectation. \"\"\" return pulumi.get(self, \"fail_fast\") @fail_fast.setter def fail_fast(self,", "the given name, id, and optional extra properties used to", "extra properties used to qualify the lookup. :param str resource_name:", "a TestMatrix resource. :param pulumi.Input['EnvironmentMatrixArgs'] environment_matrix: The devices the tests", "resource's API doesn't support deletion. When deleted, the resource will", "@property @pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self) -> pulumi.Output[int]: \"\"\" The number of", "pulumi.Input['EnvironmentMatrixArgs'], result_storage: pulumi.Input['ResultStorageArgs'], test_specification: pulumi.Input['TestSpecificationArgs'], client_info: Optional[pulumi.Input['ClientInfoArgs']] = None, fail_fast:", "\"\"\" Describes why the matrix is considered invalid. Only useful", "pulumi.Output['outputs.ClientInfoResponse']: \"\"\" Information about the client which invoked the test.", "the test matrix. \"\"\" pulumi.set(__self__, \"environment_matrix\", environment_matrix) pulumi.set(__self__, \"result_storage\", result_storage)", "import _utilities from . import outputs from ._enums import *", "pulumi.Input['ResultStorageArgs'] result_storage: Where the results for the matrix are written.", "detected. This feature is for latency sensitive workloads. The incidence", "project that owns the test matrix. 
:param pulumi.Input[pulumi.InputType['ResultStorageArgs']] result_storage: Where", "that owns the test matrix. \"\"\" return pulumi.get(self, \"project\") @project.setter", "for the matrix are written. \"\"\" return pulumi.get(self, \"result_storage\") @property", "resource_name: The unique name of the resulting resource. :param pulumi.Input[str]", "only valid when passed in combination with a valid opts.id", "pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] = None __props__.__dict__[\"environment_matrix\"] =", "warnings import pulumi import pulumi.runtime from typing import Any, Mapping,", "\"\"\" return pulumi.get(self, \"test_executions\") @property @pulumi.getter(name=\"testMatrixId\") def test_matrix_id(self) -> pulumi.Output[str]:", "are being executed on. :param pulumi.Input['ResultStorageArgs'] result_storage: Where the results", "which invoked the test. \"\"\" return pulumi.get(self, \"client_info\") @property @pulumi.getter(name=\"environmentMatrix\")", "\"\"\" The list of test executions that the service creates", "= fail_fast __props__.__dict__[\"flaky_test_attempts\"] = flaky_test_attempts __props__.__dict__[\"project\"] = project __props__.__dict__[\"request_id\"] =", "None: pulumi.set(__self__, \"fail_fast\", fail_fast) if flaky_test_attempts is not None: pulumi.set(__self__,", "\"\"\" The devices the tests are being executed on. \"\"\"", "is limited to use at most 2000 devices in parallel.", "attempts are not affected. Normally, 2 or more attempts are", "matrix. \"\"\" return pulumi.get(self, \"project\") @project.setter def project(self, value: Optional[pulumi.Input[str]]):", "you know what you are doing! *** import warnings import", "- if the user is not authorized to write to", "which invoked the test. 
:param pulumi.Input[bool] fail_fast: If true, only", "None __props__.__dict__[\"outcome_summary\"] = None __props__.__dict__[\"state\"] = None __props__.__dict__[\"test_executions\"] = None", "set when the test matrix state is FINISHED. \"\"\" return", "API doesn't support deletion. When deleted, the resource will persist", "pulumi.get(self, \"client_info\") @property @pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self) -> pulumi.Output['outputs.EnvironmentMatrixResponse']: \"\"\" The", "def project(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"project\", value) @property @pulumi.getter(name=\"requestId\") def", "return pulumi.get(self, \"client_info\") @client_info.setter def client_info(self, value: Optional[pulumi.Input['ClientInfoArgs']]): pulumi.set(self, \"client_info\",", "= None, test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None, __props__=None): if opts is", "\"project\", project) if request_id is not None: pulumi.set(__self__, \"request_id\", request_id)", "about the client which invoked the test. :param pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']] environment_matrix:", "__self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name:", "resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, client_info: Optional[pulumi.Input[pulumi.InputType['ClientInfoArgs']]] = None,", "def test_specification(self, value: pulumi.Input['TestSpecificationArgs']): pulumi.set(self, \"test_specification\", value) @property @pulumi.getter(name=\"clientInfo\") def", "that owns the test matrix. \"\"\" return pulumi.get(self, \"project\") @property", "service. 
\"\"\" return pulumi.get(self, \"test_matrix_id\") @property @pulumi.getter(name=\"testSpecification\") def test_specification(self) ->", "@environment_matrix.setter def environment_matrix(self, value: pulumi.Input['EnvironmentMatrixArgs']): pulumi.set(self, \"environment_matrix\", value) @property @pulumi.getter(name=\"resultStorage\")", "\"result_storage\", value) @property @pulumi.getter(name=\"testSpecification\") def test_specification(self) -> pulumi.Input['TestSpecificationArgs']: \"\"\" How", "resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args,", "name of the resource. :param pulumi.ResourceOptions opts: Options for the", ":param pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']] environment_matrix: The devices the tests are being executed", "= pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] = None __props__.__dict__[\"environment_matrix\"]", "A test matrix is limited to use at most 2000", "pulumi.ResourceOptions opts: Options for the resource. \"\"\" ... def __init__(__self__,", "expectation. \"\"\" return pulumi.get(self, \"fail_fast\") @fail_fast.setter def fail_fast(self, value: Optional[pulumi.Input[bool]]):", "pulumi.Output[str]: \"\"\" Indicates the current progress of the test matrix.", "*, environment_matrix: pulumi.Input['EnvironmentMatrixArgs'], result_storage: pulumi.Input['ResultStorageArgs'], test_specification: pulumi.Input['TestSpecificationArgs'], client_info: Optional[pulumi.Input['ClientInfoArgs']] =", "*** Do not edit by hand unless you're certain you", "error codes: - PERMISSION_DENIED - if the user is not", "pulumi.set(__self__, \"fail_fast\", fail_fast) if flaky_test_attempts is not None: pulumi.set(__self__, \"flaky_test_attempts\",", "constructing a TestMatrix resource. 
:param pulumi.Input['EnvironmentMatrixArgs'] environment_matrix: The devices the", "= None, request_id: Optional[pulumi.Input[str]] = None): \"\"\" The set of", "according to the given specifications. Unsupported environments will be returned", "opts: Optional[pulumi.ResourceOptions] = None) -> 'TestMatrix': \"\"\" Get an existing", "\"\"\" return pulumi.get(self, \"fail_fast\") @property @pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self) -> pulumi.Output[int]:", "Auto-naming is currently not supported for this resource. Note -", "more attempts are made if a potential infrastructure issue is", "the given specifications. Unsupported environments will be returned in the", "-> pulumi.Output['outputs.ResultStorageResponse']: \"\"\" Where the results for the matrix are", "None and not opts.urn: raise TypeError(\"Missing required property 'test_specification'\") __props__.__dict__[\"test_specification\"]", "populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the", "TestMatrixArgs args: The arguments to use to populate this resource's", "test_specification(self) -> pulumi.Input['TestSpecificationArgs']: \"\"\" How to run the test. \"\"\"", "doing! *** import warnings import pulumi import pulumi.runtime from typing", "fail_fast) if flaky_test_attempts is not None: pulumi.set(__self__, \"flaky_test_attempts\", flaky_test_attempts) if", "single attempt at most will be made to run each", "tests according to the given specifications. Unsupported environments will be", "outcome of the test. Only set when the test matrix", "if a potential infrastructure issue is detected. This feature is", "is malformed or if the matrix tries to use too", "malformed or if the matrix tries to use too many", "be re-attempted if one or more of its test cases", "currently not supported for this resource. 
Note - this resource's", "if test_specification is None and not opts.urn: raise TypeError(\"Missing required", "INVALID_ARGUMENT - if the request is malformed or if the", "-> Optional[pulumi.Input[str]]: \"\"\" The cloud project that owns the test", "will be returned in the state UNSUPPORTED. A test matrix", "in the state UNSUPPORTED. A test matrix is limited to", "for the matrix are written. :param pulumi.Input[pulumi.InputType['TestSpecificationArgs']] test_specification: How to", "Optional, Sequence, Union, overload from ... import _utilities from .", "pulumi.Input['ClientInfoArgs'] client_info: Information about the client which invoked the test.", "= None __props__.__dict__[\"invalid_matrix_details\"] = None __props__.__dict__[\"outcome_summary\"] = None __props__.__dict__[\"project\"] =", "current progress of the test matrix. \"\"\" return pulumi.get(self, \"state\")", "\"environment_matrix\") @environment_matrix.setter def environment_matrix(self, value: pulumi.Input['EnvironmentMatrixArgs']): pulumi.set(self, \"environment_matrix\", value) @property", "Options for the resource. \"\"\" ... def __init__(__self__, resource_name: str,", "\"flaky_test_attempts\") @flaky_test_attempts.setter def flaky_test_attempts(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, \"flaky_test_attempts\", value) @property", "be returned in the state UNSUPPORTED. A test matrix is", "runs a matrix of tests according to the given specifications.", "opts: Options for the resource. :param pulumi.Input[pulumi.InputType['ClientInfoArgs']] client_info: Information about", "= None __props__.__dict__[\"test_executions\"] = None __props__.__dict__[\"test_matrix_id\"] = None __props__.__dict__[\"timestamp\"] =", "test. :param pulumi.Input['ClientInfoArgs'] client_info: Information about the client which invoked", "of its test cases fail for any reason. 
The maximum", "TestMatrix(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name=\"clientInfo\") def client_info(self) -> pulumi.Output['outputs.ClientInfoResponse']: \"\"\"", "\"outcome_summary\") @property @pulumi.getter def project(self) -> pulumi.Output[str]: \"\"\" The cloud", "= request_id if result_storage is None and not opts.urn: raise", "lookup. :param str resource_name: The unique name of the resulting", "opts.id is None: if __props__ is not None: raise TypeError('__props__", "The unique name of the resulting resource. :param pulumi.Input[str] id:", "TestMatrixArgs: def __init__(__self__, *, environment_matrix: pulumi.Input['EnvironmentMatrixArgs'], result_storage: pulumi.Input['ResultStorageArgs'], test_specification: pulumi.Input['TestSpecificationArgs'],", "if fail_fast is not None: pulumi.set(__self__, \"fail_fast\", fail_fast) if flaky_test_attempts", ":param pulumi.Input['ResultStorageArgs'] result_storage: Where the results for the matrix are", "project - INVALID_ARGUMENT - if the request is malformed or", "name of the resource. :param TestMatrixArgs args: The arguments to", "the test. \"\"\" return pulumi.get(self, \"client_info\") @property @pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self)", "to write to project - INVALID_ARGUMENT - if the request", "Mapping, Optional, Sequence, Union, overload from ... import _utilities from", "pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[pulumi.InputType['ClientInfoArgs']] client_info: Information", "client_info: Information about the client which invoked the test. :param", "cloud project that owns the test matrix. :param pulumi.Input[pulumi.InputType['ResultStorageArgs']] result_storage:", "opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] =", "executed on. 
\"\"\" return pulumi.get(self, \"environment_matrix\") @property @pulumi.getter(name=\"failFast\") def fail_fast(self)", "id set by the service. \"\"\" return pulumi.get(self, \"test_matrix_id\") @property", "the test. :param pulumi.Input['ClientInfoArgs'] client_info: Information about the client which", "None __props__.__dict__[\"test_matrix_id\"] = None __props__.__dict__[\"timestamp\"] = None super(TestMatrix, __self__).__init__( 'google-native:testing/v1:TestMatrix',", "Optional[pulumi.ResourceOptions] = None): \"\"\" Creates and runs a matrix of", "qualify the lookup. :param str resource_name: The unique name of", "client_info(self, value: Optional[pulumi.Input['ClientInfoArgs']]): pulumi.set(self, \"client_info\", value) @property @pulumi.getter(name=\"failFast\") def fail_fast(self)", "devices the tests are being executed on. :param pulumi.Input[bool] fail_fast:", "0, which implies no reruns. :param pulumi.Input[str] project: The cloud", "is for latency sensitive workloads. The incidence of execution failures", "= None __props__.__dict__[\"test_specification\"] = None __props__.__dict__[\"timestamp\"] = None return TestMatrix(resource_name,", "@pulumi.getter def state(self) -> pulumi.Output[str]: \"\"\" Indicates the current progress", "10. Default is 0, which implies no reruns. :param pulumi.Input[str]", "pulumi.get(self, \"project\") @property @pulumi.getter(name=\"resultStorage\") def result_storage(self) -> pulumi.Output['outputs.ResultStorageResponse']: \"\"\" Where", "pulumi.get(self, \"project\") @project.setter def project(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"project\", value)", "pulumi.set(self, \"environment_matrix\", value) @property @pulumi.getter(name=\"resultStorage\") def result_storage(self) -> pulumi.Input['ResultStorageArgs']: \"\"\"", "__init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, client_info: Optional[pulumi.Input[pulumi.InputType['ClientInfoArgs']]] =", "devices. 
Auto-naming is currently not supported for this resource. Note", "not edit by hand unless you're certain you know what", "test cases fail for any reason. The maximum number of", "given name, id, and optional extra properties used to qualify", "project __props__.__dict__[\"request_id\"] = request_id if result_storage is None and not", "is not authorized to write to project - INVALID_ARGUMENT -", "id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None) -> 'TestMatrix': \"\"\" Get", "not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def", "raise TypeError(\"Missing required property 'test_specification'\") __props__.__dict__[\"test_specification\"] = test_specification __props__.__dict__[\"invalid_matrix_details\"] =", "str, args: TestMatrixArgs, opts: Optional[pulumi.ResourceOptions] = None): \"\"\" Creates and", "'TestMatrix'] @pulumi.input_type class TestMatrixArgs: def __init__(__self__, *, environment_matrix: pulumi.Input['EnvironmentMatrixArgs'], result_storage:", "no reruns. 
\"\"\" return pulumi.get(self, \"flaky_test_attempts\") @flaky_test_attempts.setter def flaky_test_attempts(self, value:", "or more of its test cases fail for any reason.", "= TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] = client_info if environment_matrix is None and", "= None, environment_matrix: Optional[pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']]] = None, fail_fast: Optional[pulumi.Input[bool]] = None,", "None: pulumi.set(__self__, \"request_id\", request_id) @property @pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self) -> pulumi.Input['EnvironmentMatrixArgs']:", "@pulumi.getter(name=\"resultStorage\") def result_storage(self) -> pulumi.Input['ResultStorageArgs']: \"\"\" Where the results for", "None __props__.__dict__[\"timestamp\"] = None return TestMatrix(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name=\"clientInfo\")", "used to qualify the lookup. :param str resource_name: The unique", "more of its test cases fail for any reason. The", "test. \"\"\" return pulumi.get(self, \"test_specification\") @test_specification.setter def test_specification(self, value: pulumi.Input['TestSpecificationArgs']):", "you are doing! *** import warnings import pulumi import pulumi.runtime", "= result_storage if test_specification is None and not opts.urn: raise", "def request_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"request_id\", value) class TestMatrix(pulumi.CustomResource): @overload", "matrices in the INVALID state. \"\"\" return pulumi.get(self, \"invalid_matrix_details\") @property", "the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param", "of arguments for constructing a TestMatrix resource. :param pulumi.Input['EnvironmentMatrixArgs'] environment_matrix:", "The devices the tests are being executed on. :param pulumi.Input['ResultStorageArgs']", "too many simultaneous devices. 
Auto-naming is currently not supported for", "None __props__.__dict__[\"fail_fast\"] = None __props__.__dict__[\"flaky_test_attempts\"] = None __props__.__dict__[\"invalid_matrix_details\"] = None", "written. :param pulumi.Input[pulumi.InputType['TestSpecificationArgs']] test_specification: How to run the test. \"\"\"", "optional extra properties used to qualify the lookup. :param str", "existing TestMatrix resource's state with the given name, id, and", "the tests are being executed on. :param pulumi.Input[bool] fail_fast: If", "The time this test matrix was initially created. \"\"\" return", "made if a potential infrastructure issue is detected. This feature", "opts: Optional[pulumi.ResourceOptions] = None): \"\"\" Creates and runs a matrix", "-> pulumi.Output[str]: \"\"\" The time this test matrix was initially", "return pulumi.get(self, \"environment_matrix\") @property @pulumi.getter(name=\"failFast\") def fail_fast(self) -> pulumi.Output[bool]: \"\"\"", "return pulumi.get(self, \"client_info\") @property @pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self) -> pulumi.Output['outputs.EnvironmentMatrixResponse']: \"\"\"", "import pulumi import pulumi.runtime from typing import Any, Mapping, Optional,", "of execution failures may be significantly greater for fail-fast matrices", "test_specification __props__.__dict__[\"invalid_matrix_details\"] = None __props__.__dict__[\"outcome_summary\"] = None __props__.__dict__[\"state\"] = None", "TypeError('Expected resource options to be a ResourceOptions instance') if opts.version", "__init__(__self__, resource_name: str, args: TestMatrixArgs, opts: Optional[pulumi.ResourceOptions] = None): \"\"\"", "Optional[pulumi.Input[int]] = None, project: Optional[pulumi.Input[str]] = None, request_id: Optional[pulumi.Input[str]] =", "TestExecution should be re-attempted if one or more of its", "the matrix are written. 
:param pulumi.Input[pulumi.InputType['TestSpecificationArgs']] test_specification: How to run", ":param pulumi.Input[str] id: The unique provider ID of the resource", "the resource. \"\"\" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = TestMatrixArgs.__new__(TestMatrixArgs)", "matrix. \"\"\" return pulumi.get(self, \"project\") @property @pulumi.getter(name=\"resultStorage\") def result_storage(self) ->", "pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']] environment_matrix: The devices the tests are being executed on.", "None __props__.__dict__[\"test_executions\"] = None __props__.__dict__[\"test_matrix_id\"] = None __props__.__dict__[\"timestamp\"] = None", "str resource_name: The unique name of the resulting resource. :param", "Optional[pulumi.Input[bool]] = None, flaky_test_attempts: Optional[pulumi.Input[int]] = None, project: Optional[pulumi.Input[str]] =", "@overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, client_info:", "None, result_storage: Optional[pulumi.Input[pulumi.InputType['ResultStorageArgs']]] = None, test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None, __props__=None):", "@pulumi.getter def project(self) -> Optional[pulumi.Input[str]]: \"\"\" The cloud project that", "resource_name: str, args: TestMatrixArgs, opts: Optional[pulumi.ResourceOptions] = None): \"\"\" Creates", "__props__.__dict__[\"test_matrix_id\"] = None __props__.__dict__[\"test_specification\"] = None __props__.__dict__[\"timestamp\"] = None return", "on Google Cloud even though it will be deleted from", "opts.urn: raise TypeError(\"Missing required property 'environment_matrix'\") __props__.__dict__[\"environment_matrix\"] = environment_matrix __props__.__dict__[\"fail_fast\"]", "most 2000 devices in parallel. May return any of the", "from Pulumi state. 
:param str resource_name: The name of the", "flaky_test_attempts __props__.__dict__[\"project\"] = project __props__.__dict__[\"request_id\"] = request_id if result_storage is", "Options for the resource. :param pulumi.Input[pulumi.InputType['ClientInfoArgs']] client_info: Information about the", "= project __props__.__dict__[\"request_id\"] = request_id if result_storage is None and", "the matrix tries to use too many simultaneous devices. Auto-naming", ". import outputs from ._enums import * from ._inputs import", ":param pulumi.Input[pulumi.InputType['ResultStorageArgs']] result_storage: Where the results for the matrix are", "invalid. Only useful for matrices in the INVALID state. \"\"\"", "to use too many simultaneous devices. Auto-naming is currently not", "raise TypeError(\"Missing required property 'result_storage'\") __props__.__dict__[\"result_storage\"] = result_storage if test_specification", "._enums import * from ._inputs import * __all__ = ['TestMatrixArgs',", "because of that expectation. \"\"\" return pulumi.get(self, \"fail_fast\") @fail_fast.setter def", "@staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None)", "test. \"\"\" ... @overload def __init__(__self__, resource_name: str, args: TestMatrixArgs,", "request_id(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, \"request_id\") @request_id.setter def request_id(self, value:", "def fail_fast(self) -> Optional[pulumi.Input[bool]]: \"\"\" If true, only a single", "is 10. Default is 0, which implies no reruns. :param", "if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be", "should be re-attempted if one or more of its test", "from . import outputs from ._enums import * from ._inputs", "The name of the resource. 
:param pulumi.ResourceOptions opts: Options for", "Optional[pulumi.Input[str]] = None, request_id: Optional[pulumi.Input[str]] = None): \"\"\" The set", "\"request_id\") @request_id.setter def request_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"request_id\", value) class", "or if the matrix tries to use too many simultaneous", "return pulumi.get(self, \"project\") @project.setter def project(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"project\",", "of that expectation. \"\"\" return pulumi.get(self, \"fail_fast\") @property @pulumi.getter(name=\"flakyTestAttempts\") def", "times a TestExecution should be re-attempted if one or more", "parallel. May return any of the following canonical error codes:", "TypeError(\"Missing required property 'test_specification'\") __props__.__dict__[\"test_specification\"] = test_specification __props__.__dict__[\"invalid_matrix_details\"] = None", "pulumi.set(self, \"test_specification\", value) @property @pulumi.getter(name=\"clientInfo\") def client_info(self) -> Optional[pulumi.Input['ClientInfoArgs']]: \"\"\"", "test matrix. \"\"\" return pulumi.get(self, \"project\") @project.setter def project(self, value:", "no reruns. :param pulumi.Input[str] project: The cloud project that owns", "0, which implies no reruns. \"\"\" return pulumi.get(self, \"flaky_test_attempts\") @flaky_test_attempts.setter", "if project is not None: pulumi.set(__self__, \"project\", project) if request_id", "@property @pulumi.getter def state(self) -> pulumi.Output[str]: \"\"\" Indicates the current", "environment_matrix(self) -> pulumi.Input['EnvironmentMatrixArgs']: \"\"\" The devices the tests are being", "to qualify the lookup. :param str resource_name: The unique name", "the lookup. :param str resource_name: The unique name of the", "reason. The maximum number of reruns allowed is 10. 
Default", "resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts:", "arguments for constructing a TestMatrix resource. :param pulumi.Input['EnvironmentMatrixArgs'] environment_matrix: The", "resource. :param TestMatrixArgs args: The arguments to use to populate", "returned in the state UNSUPPORTED. A test matrix is limited", "__init__(__self__, *, environment_matrix: pulumi.Input['EnvironmentMatrixArgs'], result_storage: pulumi.Input['ResultStorageArgs'], test_specification: pulumi.Input['TestSpecificationArgs'], client_info: Optional[pulumi.Input['ClientInfoArgs']]", "def environment_matrix(self, value: pulumi.Input['EnvironmentMatrixArgs']): pulumi.set(self, \"environment_matrix\", value) @property @pulumi.getter(name=\"resultStorage\") def", "the test. \"\"\" return pulumi.get(self, \"client_info\") @client_info.setter def client_info(self, value:", "\"fail_fast\") @property @pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self) -> pulumi.Output[int]: \"\"\" The number", "\"flaky_test_attempts\", value) @property @pulumi.getter def project(self) -> Optional[pulumi.Input[str]]: \"\"\" The", "if client_info is not None: pulumi.set(__self__, \"client_info\", client_info) if fail_fast", "= None): \"\"\" Creates and runs a matrix of tests", ":param pulumi.Input[pulumi.InputType['ClientInfoArgs']] client_info: Information about the client which invoked the", "the test matrix. \"\"\" return pulumi.get(self, \"project\") @project.setter def project(self,", "the client which invoked the test. \"\"\" return pulumi.get(self, \"client_info\")", "@result_storage.setter def result_storage(self, value: pulumi.Input['ResultStorageArgs']): pulumi.set(self, \"result_storage\", value) @property @pulumi.getter(name=\"testSpecification\")", "the matrix. Flaky test attempts are not affected. Normally, 2", "resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. 
\"\"\"", "many simultaneous devices. Auto-naming is currently not supported for this", "environment_matrix(self, value: pulumi.Input['EnvironmentMatrixArgs']): pulumi.set(self, \"environment_matrix\", value) @property @pulumi.getter(name=\"resultStorage\") def result_storage(self)", "None, client_info: Optional[pulumi.Input[pulumi.InputType['ClientInfoArgs']]] = None, environment_matrix: Optional[pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']]] = None, fail_fast:", "test executions that the service creates for this matrix. \"\"\"", "__props__.__dict__[\"test_matrix_id\"] = None __props__.__dict__[\"timestamp\"] = None super(TestMatrix, __self__).__init__( 'google-native:testing/v1:TestMatrix', resource_name,", "raise TypeError('__props__ is only valid when passed in combination with", "\"fail_fast\", fail_fast) if flaky_test_attempts is not None: pulumi.set(__self__, \"flaky_test_attempts\", flaky_test_attempts)", "return pulumi.get(self, \"result_storage\") @result_storage.setter def result_storage(self, value: pulumi.Input['ResultStorageArgs']): pulumi.set(self, \"result_storage\",", "written. 
\"\"\" return pulumi.get(self, \"result_storage\") @property @pulumi.getter def state(self) ->", "@property @pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self) -> pulumi.Output['outputs.EnvironmentMatrixResponse']: \"\"\" The devices the", "\"test_specification\") @test_specification.setter def test_specification(self, value: pulumi.Input['TestSpecificationArgs']): pulumi.set(self, \"test_specification\", value) @property", "\"\"\" The number of times a TestExecution should be re-attempted", "@property @pulumi.getter(name=\"clientInfo\") def client_info(self) -> Optional[pulumi.Input['ClientInfoArgs']]: \"\"\" Information about the", "\"\"\" return pulumi.get(self, \"environment_matrix\") @environment_matrix.setter def environment_matrix(self, value: pulumi.Input['EnvironmentMatrixArgs']): pulumi.set(self,", "str resource_name: The name of the resource. :param pulumi.ResourceOptions opts:", "pulumi.ResourceOptions opts: Options for the resource. \"\"\" opts = pulumi.ResourceOptions.merge(opts,", "\"flaky_test_attempts\", flaky_test_attempts) if project is not None: pulumi.set(__self__, \"project\", project)", "The overall outcome of the test. Only set when the", "Flaky test attempts are not affected. Normally, 2 or more", "a matrix of tests according to the given specifications. Unsupported", "pulumi.get(self, \"state\") @property @pulumi.getter(name=\"testExecutions\") def test_executions(self) -> pulumi.Output[Sequence['outputs.TestExecutionResponse']]: \"\"\" The", "property 'test_specification'\") __props__.__dict__[\"test_specification\"] = test_specification __props__.__dict__[\"invalid_matrix_details\"] = None __props__.__dict__[\"outcome_summary\"] =", "devices the tests are being executed on. :param pulumi.Input['ResultStorageArgs'] result_storage:", "infrastructure issue is detected. This feature is for latency sensitive", "Union, overload from ... import _utilities from . 
import outputs", ":param pulumi.Input[str] project: The cloud project that owns the test", "Information about the client which invoked the test. \"\"\" return", "import outputs from ._enums import * from ._inputs import *", "run each execution/shard in the matrix. Flaky test attempts are", ":param pulumi.Input[pulumi.InputType['TestSpecificationArgs']] test_specification: How to run the test. \"\"\" ...", "project that owns the test matrix. \"\"\" return pulumi.get(self, \"project\")", "overload from ... import _utilities from . import outputs from", "test_specification) if client_info is not None: pulumi.set(__self__, \"client_info\", client_info) if", "value) @property @pulumi.getter(name=\"requestId\") def request_id(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, \"request_id\")", ":param str resource_name: The name of the resource. :param TestMatrixArgs", "The cloud project that owns the test matrix. \"\"\" pulumi.set(__self__,", "\"environment_matrix\") @property @pulumi.getter(name=\"failFast\") def fail_fast(self) -> pulumi.Output[bool]: \"\"\" If true,", "limited because of that expectation. \"\"\" return pulumi.get(self, \"fail_fast\") @property", "is considered invalid. Only useful for matrices in the INVALID", "limited because of that expectation. \"\"\" return pulumi.get(self, \"fail_fast\") @fail_fast.setter", "owns the test matrix. \"\"\" pulumi.set(__self__, \"environment_matrix\", environment_matrix) pulumi.set(__self__, \"result_storage\",", "which implies no reruns. \"\"\" return pulumi.get(self, \"flaky_test_attempts\") @property @pulumi.getter(name=\"invalidMatrixDetails\")", "flaky_test_attempts: Optional[pulumi.Input[int]] = None, project: Optional[pulumi.Input[str]] = None, request_id: Optional[pulumi.Input[str]]", "results for the matrix are written. 
:param pulumi.Input[pulumi.InputType['TestSpecificationArgs']] test_specification: How", "str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(TestMatrixArgs, pulumi.ResourceOptions, *args, **kwargs)", "= client_info if environment_matrix is None and not opts.urn: raise", "opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions):", "\"result_storage\") @result_storage.setter def result_storage(self, value: pulumi.Input['ResultStorageArgs']): pulumi.set(self, \"result_storage\", value) @property", "fail for any reason. The maximum number of reruns allowed", "fail_fast is not None: pulumi.set(__self__, \"fail_fast\", fail_fast) if flaky_test_attempts is", "resource. :param pulumi.Input['EnvironmentMatrixArgs'] environment_matrix: The devices the tests are being", "the test matrix state is FINISHED. \"\"\" return pulumi.get(self, \"outcome_summary\")", "of times a TestExecution should be re-attempted if one or", "opts.urn: raise TypeError(\"Missing required property 'result_storage'\") __props__.__dict__[\"result_storage\"] = result_storage if", "matrix. :param pulumi.Input[pulumi.InputType['ResultStorageArgs']] result_storage: Where the results for the matrix", "@request_id.setter def request_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"request_id\", value) class TestMatrix(pulumi.CustomResource):", "__props__.__dict__[\"timestamp\"] = None return TestMatrix(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name=\"clientInfo\") def", "\"\"\" If true, only a single attempt at most will", "allowed is 10. Default is 0, which implies no reruns.", "__props__.__dict__[\"state\"] = None __props__.__dict__[\"test_executions\"] = None __props__.__dict__[\"test_matrix_id\"] = None __props__.__dict__[\"timestamp\"]", "run the test. 
\"\"\" return pulumi.get(self, \"test_specification\") @test_specification.setter def test_specification(self,", "opts.version is None: opts.version = _utilities.get_version() if opts.id is None:", "hand unless you're certain you know what you are doing!", "None and not opts.urn: raise TypeError(\"Missing required property 'result_storage'\") __props__.__dict__[\"result_storage\"]", "any reason. The maximum number of reruns allowed is 10.", "test matrix. :param pulumi.Input[pulumi.InputType['ResultStorageArgs']] result_storage: Where the results for the", ":param str resource_name: The unique name of the resulting resource.", ":param pulumi.Input[bool] fail_fast: If true, only a single attempt at", "result_storage: Optional[pulumi.Input[pulumi.InputType['ResultStorageArgs']]] = None, test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None, __props__=None): if", "project: The cloud project that owns the test matrix. :param", "If true, only a single attempt at most will be", "How to run the test. \"\"\" return pulumi.get(self, \"test_specification\") @test_specification.setter", "pulumi.set(self, \"flaky_test_attempts\", value) @property @pulumi.getter def project(self) -> Optional[pulumi.Input[str]]: \"\"\"", "its test cases fail for any reason. The maximum number", "a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version()", "not opts.urn: raise TypeError(\"Missing required property 'environment_matrix'\") __props__.__dict__[\"environment_matrix\"] = environment_matrix", "client_info is not None: pulumi.set(__self__, \"client_info\", client_info) if fail_fast is", "def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, client_info: Optional[pulumi.Input[pulumi.InputType['ClientInfoArgs']]]", "executions that the service creates for this matrix. 
\"\"\" return", "Optional[pulumi.Input[int]]): pulumi.set(self, \"flaky_test_attempts\", value) @property @pulumi.getter def project(self) -> Optional[pulumi.Input[str]]:", "__props__.__dict__[\"timestamp\"] = None super(TestMatrix, __self__).__init__( 'google-native:testing/v1:TestMatrix', resource_name, __props__, opts) @staticmethod", "and not opts.urn: raise TypeError(\"Missing required property 'environment_matrix'\") __props__.__dict__[\"environment_matrix\"] =", "= None, __props__=None): if opts is None: opts = pulumi.ResourceOptions()", "about the client which invoked the test. :param pulumi.Input[bool] fail_fast:", "what you are doing! *** import warnings import pulumi import", "Default is 0, which implies no reruns. :param pulumi.Input[str] project:", "pulumi.get(self, \"environment_matrix\") @environment_matrix.setter def environment_matrix(self, value: pulumi.Input['EnvironmentMatrixArgs']): pulumi.set(self, \"environment_matrix\", value)", "resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[pulumi.InputType['ClientInfoArgs']]", "return pulumi.get(self, \"environment_matrix\") @environment_matrix.setter def environment_matrix(self, value: pulumi.Input['EnvironmentMatrixArgs']): pulumi.set(self, \"environment_matrix\",", "result_storage(self, value: pulumi.Input['ResultStorageArgs']): pulumi.set(self, \"result_storage\", value) @property @pulumi.getter(name=\"testSpecification\") def test_specification(self)", "the request is malformed or if the matrix tries to", "= None): \"\"\" The set of arguments for constructing a", "= None __props__.__dict__[\"test_matrix_id\"] = None __props__.__dict__[\"test_specification\"] = None __props__.__dict__[\"timestamp\"] =", "None __props__.__dict__[\"outcome_summary\"] = None __props__.__dict__[\"project\"] = None __props__.__dict__[\"result_storage\"] = None", "Pulumi state. :param str resource_name: The name of the resource.", "it will be deleted from Pulumi state. 
:param str resource_name:", "on. :param pulumi.Input[bool] fail_fast: If true, only a single attempt", "expectation. \"\"\" return pulumi.get(self, \"fail_fast\") @property @pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self) ->", "Creates and runs a matrix of tests according to the", "invoked the test. :param pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']] environment_matrix: The devices the tests", "= None __props__.__dict__[\"outcome_summary\"] = None __props__.__dict__[\"state\"] = None __props__.__dict__[\"test_executions\"] =", "TestMatrixArgs, opts: Optional[pulumi.ResourceOptions] = None): \"\"\" Creates and runs a", "the test. Only set when the test matrix state is", "because of that expectation. :param pulumi.Input[int] flaky_test_attempts: The number of", "str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None) -> 'TestMatrix': \"\"\"", "2000 devices in parallel. May return any of the following", "environment_matrix __props__.__dict__[\"fail_fast\"] = fail_fast __props__.__dict__[\"flaky_test_attempts\"] = flaky_test_attempts __props__.__dict__[\"project\"] = project", ":param pulumi.ResourceOptions opts: Options for the resource. \"\"\" ... def", "@property @pulumi.getter def project(self) -> Optional[pulumi.Input[str]]: \"\"\" The cloud project", "= None, request_id: Optional[pulumi.Input[str]] = None, result_storage: Optional[pulumi.Input[pulumi.InputType['ResultStorageArgs']]] = None,", "owns the test matrix. \"\"\" return pulumi.get(self, \"project\") @property @pulumi.getter(name=\"resultStorage\")", "the matrix are written. \"\"\" return pulumi.get(self, \"result_storage\") @property @pulumi.getter", "not affected. 
Normally, 2 or more attempts are made if", "None __props__.__dict__[\"test_executions\"] = None __props__.__dict__[\"test_matrix_id\"] = None __props__.__dict__[\"test_specification\"] = None", "@flaky_test_attempts.setter def flaky_test_attempts(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, \"flaky_test_attempts\", value) @property @pulumi.getter", "execution failures may be significantly greater for fail-fast matrices and", "the resource to lookup. :param pulumi.ResourceOptions opts: Options for the", "environment_matrix(self) -> pulumi.Output['outputs.EnvironmentMatrixResponse']: \"\"\" The devices the tests are being", "The cloud project that owns the test matrix. \"\"\" return", "return pulumi.get(self, \"test_matrix_id\") @property @pulumi.getter(name=\"testSpecification\") def test_specification(self) -> pulumi.Output['outputs.TestSpecificationResponse']: \"\"\"", "value: Optional[pulumi.Input[bool]]): pulumi.set(self, \"fail_fast\", value) @property @pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self) ->", "def flaky_test_attempts(self) -> pulumi.Output[int]: \"\"\" The number of times a", "will be made to run each execution/shard in the matrix.", "PERMISSION_DENIED - if the user is not authorized to write", "\"environment_matrix\", environment_matrix) pulumi.set(__self__, \"result_storage\", result_storage) pulumi.set(__self__, \"test_specification\", test_specification) if client_info", "of that expectation. :param pulumi.Input[int] flaky_test_attempts: The number of times", "_utilities from . 
import outputs from ._enums import * from", "pulumi.get(self, \"flaky_test_attempts\") @property @pulumi.getter(name=\"invalidMatrixDetails\") def invalid_matrix_details(self) -> pulumi.Output[str]: \"\"\" Describes", "None, project: Optional[pulumi.Input[str]] = None, request_id: Optional[pulumi.Input[str]] = None): \"\"\"", "pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None) -> 'TestMatrix': \"\"\" Get an", "codes: - PERMISSION_DENIED - if the user is not authorized", "the service creates for this matrix. \"\"\" return pulumi.get(self, \"test_executions\")", "the matrix are written. :param pulumi.Input['TestSpecificationArgs'] test_specification: How to run", "true, only a single attempt at most will be made", "unique provider ID of the resource to lookup. :param pulumi.ResourceOptions", "set by the service. \"\"\" return pulumi.get(self, \"test_matrix_id\") @property @pulumi.getter(name=\"testSpecification\")", "The number of times a TestExecution should be re-attempted if", "potential infrastructure issue is detected. This feature is for latency", "service creates for this matrix. \"\"\" return pulumi.get(self, \"test_executions\") @property", "Describes why the matrix is considered invalid. Only useful for", "\"result_storage\") @property @pulumi.getter def state(self) -> pulumi.Output[str]: \"\"\" Indicates the", "*args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__)", "-> Optional[pulumi.Input[bool]]: \"\"\" If true, only a single attempt at", "Unique id set by the service. 
\"\"\" return pulumi.get(self, \"test_matrix_id\")", "__props__.__dict__[\"outcome_summary\"] = None __props__.__dict__[\"project\"] = None __props__.__dict__[\"result_storage\"] = None __props__.__dict__[\"state\"]", "pulumi.get(self, \"test_matrix_id\") @property @pulumi.getter(name=\"testSpecification\") def test_specification(self) -> pulumi.Output['outputs.TestSpecificationResponse']: \"\"\" How", "client_info: Optional[pulumi.Input['ClientInfoArgs']] = None, fail_fast: Optional[pulumi.Input[bool]] = None, flaky_test_attempts: Optional[pulumi.Input[int]]", "tests are being executed on. :param pulumi.Input[bool] fail_fast: If true,", "deleted, the resource will persist on Google Cloud even though", "required property 'test_specification'\") __props__.__dict__[\"test_specification\"] = test_specification __props__.__dict__[\"invalid_matrix_details\"] = None __props__.__dict__[\"outcome_summary\"]", "@property @pulumi.getter(name=\"testSpecification\") def test_specification(self) -> pulumi.Output['outputs.TestSpecificationResponse']: \"\"\" How to run", "opts.urn: raise TypeError(\"Missing required property 'test_specification'\") __props__.__dict__[\"test_specification\"] = test_specification __props__.__dict__[\"invalid_matrix_details\"]", "that expectation. \"\"\" return pulumi.get(self, \"fail_fast\") @property @pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self)", "= None __props__.__dict__[\"outcome_summary\"] = None __props__.__dict__[\"project\"] = None __props__.__dict__[\"result_storage\"] =", "failures may be significantly greater for fail-fast matrices and support", "Optional[pulumi.Input[str]]: \"\"\" The cloud project that owns the test matrix.", "not None: pulumi.set(__self__, \"fail_fast\", fail_fast) if flaky_test_attempts is not None:", "request_id if result_storage is None and not opts.urn: raise TypeError(\"Missing", "support deletion. 
When deleted, the resource will persist on Google", "client_info) if fail_fast is not None: pulumi.set(__self__, \"fail_fast\", fail_fast) if", "Optional[pulumi.Input[bool]]: \"\"\" If true, only a single attempt at most", "= None __props__.__dict__[\"test_executions\"] = None __props__.__dict__[\"test_matrix_id\"] = None __props__.__dict__[\"test_specification\"] =", "super(TestMatrix, __self__).__init__( 'google-native:testing/v1:TestMatrix', resource_name, __props__, opts) @staticmethod def get(resource_name: str,", "test matrix is limited to use at most 2000 devices", "the Pulumi SDK Generator. *** # *** Do not edit", "\"\"\" The set of arguments for constructing a TestMatrix resource.", "*args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None,", "= None super(TestMatrix, __self__).__init__( 'google-native:testing/v1:TestMatrix', resource_name, __props__, opts) @staticmethod def", "\"\"\" The time this test matrix was initially created. \"\"\"", "test_matrix_id(self) -> pulumi.Output[str]: \"\"\" Unique id set by the service.", "pulumi.get(self, \"request_id\") @request_id.setter def request_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"request_id\", value)", "for any reason. The maximum number of reruns allowed is", "properties. :param pulumi.ResourceOptions opts: Options for the resource. \"\"\" ...", "opts.version = _utilities.get_version() if opts.id is None: if __props__ is", "opts: Options for the resource. \"\"\" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))", "def test_specification(self) -> pulumi.Input['TestSpecificationArgs']: \"\"\" How to run the test.", "results for the matrix are written. 
:param pulumi.Input['TestSpecificationArgs'] test_specification: How", "None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__,", "None) -> 'TestMatrix': \"\"\" Get an existing TestMatrix resource's state", "_internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, client_info: Optional[pulumi.Input[pulumi.InputType['ClientInfoArgs']]] =", "return pulumi.get(self, \"result_storage\") @property @pulumi.getter def state(self) -> pulumi.Output[str]: \"\"\"", "2 or more attempts are made if a potential infrastructure", "write to project - INVALID_ARGUMENT - if the request is", "... def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts =", "-> pulumi.Output['outputs.ClientInfoResponse']: \"\"\" Information about the client which invoked the", "are made if a potential infrastructure issue is detected. This", "None, project: Optional[pulumi.Input[str]] = None, request_id: Optional[pulumi.Input[str]] = None, result_storage:", "The name of the resource. :param TestMatrixArgs args: The arguments", "flaky_test_attempts) if project is not None: pulumi.set(__self__, \"project\", project) if", "the resource will persist on Google Cloud even though it", "from ._inputs import * __all__ = ['TestMatrixArgs', 'TestMatrix'] @pulumi.input_type class", "the state UNSUPPORTED. A test matrix is limited to use", "__all__ = ['TestMatrixArgs', 'TestMatrix'] @pulumi.input_type class TestMatrixArgs: def __init__(__self__, *,", "know what you are doing! *** import warnings import pulumi", "return pulumi.get(self, \"fail_fast\") @property @pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self) -> pulumi.Output[int]: \"\"\"", "cloud project that owns the test matrix. 
\"\"\" return pulumi.get(self,", "None: pulumi.set(__self__, \"project\", project) if request_id is not None: pulumi.set(__self__,", "pulumi.set(__self__, \"project\", project) if request_id is not None: pulumi.set(__self__, \"request_id\",", "that expectation. \"\"\" return pulumi.get(self, \"fail_fast\") @fail_fast.setter def fail_fast(self, value:", "test_specification is None and not opts.urn: raise TypeError(\"Missing required property", "def result_storage(self, value: pulumi.Input['ResultStorageArgs']): pulumi.set(self, \"result_storage\", value) @property @pulumi.getter(name=\"testSpecification\") def", "__props__.__dict__[\"outcome_summary\"] = None __props__.__dict__[\"state\"] = None __props__.__dict__[\"test_executions\"] = None __props__.__dict__[\"test_matrix_id\"]", "Optional[pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']]] = None, fail_fast: Optional[pulumi.Input[bool]] = None, flaky_test_attempts: Optional[pulumi.Input[int]] =", "reruns. \"\"\" return pulumi.get(self, \"flaky_test_attempts\") @property @pulumi.getter(name=\"invalidMatrixDetails\") def invalid_matrix_details(self) ->", "is not None: pulumi.set(__self__, \"client_info\", client_info) if fail_fast is not", "result_storage: Optional[pulumi.Input[pulumi.InputType['ResultStorageArgs']]] = None, test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None, __props__=None): \"\"\"", "pulumi.get(self, \"environment_matrix\") @property @pulumi.getter(name=\"failFast\") def fail_fast(self) -> pulumi.Output[bool]: \"\"\" If", "return pulumi.get(self, \"fail_fast\") @fail_fast.setter def fail_fast(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, \"fail_fast\",", "set of arguments for constructing a TestMatrix resource. 
:param pulumi.Input['EnvironmentMatrixArgs']", "-> pulumi.Output['outputs.EnvironmentMatrixResponse']: \"\"\" The devices the tests are being executed", "result_storage: pulumi.Input['ResultStorageArgs'], test_specification: pulumi.Input['TestSpecificationArgs'], client_info: Optional[pulumi.Input['ClientInfoArgs']] = None, fail_fast: Optional[pulumi.Input[bool]]", "test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None, __props__=None): \"\"\" Creates and runs a", "\"result_storage\", result_storage) pulumi.set(__self__, \"test_specification\", test_specification) if client_info is not None:", "the test matrix. :param pulumi.Input[pulumi.InputType['ResultStorageArgs']] result_storage: Where the results for", "for the resource. \"\"\" ... def __init__(__self__, resource_name: str, *args,", "outputs from ._enums import * from ._inputs import * __all__", "\"\"\" return pulumi.get(self, \"client_info\") @property @pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self) -> pulumi.Output['outputs.EnvironmentMatrixResponse']:", "by hand unless you're certain you know what you are", "resource. :param pulumi.Input[str] id: The unique provider ID of the", "How to run the test. :param pulumi.Input['ClientInfoArgs'] client_info: Information about", "= None) -> 'TestMatrix': \"\"\" Get an existing TestMatrix resource's", "deletion. 
When deleted, the resource will persist on Google Cloud", "@pulumi.getter(name=\"invalidMatrixDetails\") def invalid_matrix_details(self) -> pulumi.Output[str]: \"\"\" Describes why the matrix", "\"client_info\", value) @property @pulumi.getter(name=\"failFast\") def fail_fast(self) -> Optional[pulumi.Input[bool]]: \"\"\" If", "request_id: Optional[pulumi.Input[str]] = None, result_storage: Optional[pulumi.Input[pulumi.InputType['ResultStorageArgs']]] = None, test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]]", "<reponame>AaronFriel/pulumi-google-native # coding=utf-8 # *** WARNING: this file was generated", "__props__.__dict__[\"test_specification\"] = None __props__.__dict__[\"timestamp\"] = None return TestMatrix(resource_name, opts=opts, __props__=__props__)", "that expectation. :param pulumi.Input[int] flaky_test_attempts: The number of times a", "results for the matrix are written. \"\"\" return pulumi.get(self, \"result_storage\")", "test_specification: pulumi.Input['TestSpecificationArgs'], client_info: Optional[pulumi.Input['ClientInfoArgs']] = None, fail_fast: Optional[pulumi.Input[bool]] = None,", "flaky_test_attempts is not None: pulumi.set(__self__, \"flaky_test_attempts\", flaky_test_attempts) if project is", "in combination with a valid opts.id to get an existing", "= None, client_info: Optional[pulumi.Input[pulumi.InputType['ClientInfoArgs']]] = None, environment_matrix: Optional[pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']]] = None,", "_utilities.get_resource_args_opts(TestMatrixArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name,", "pulumi.Output['outputs.ResultStorageResponse']: \"\"\" Where the results for the matrix are written.", "creates for this matrix. 
\"\"\" return pulumi.get(self, \"test_executions\") @property @pulumi.getter(name=\"testMatrixId\")", ":param pulumi.Input['TestSpecificationArgs'] test_specification: How to run the test. :param pulumi.Input['ClientInfoArgs']", "fail_fast(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, \"fail_fast\", value) @property @pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self)", "or more attempts are made if a potential infrastructure issue", "def project(self) -> Optional[pulumi.Input[str]]: \"\"\" The cloud project that owns", "of the resource to lookup. :param pulumi.ResourceOptions opts: Options for", "0, which implies no reruns. \"\"\" return pulumi.get(self, \"flaky_test_attempts\") @property", "def flaky_test_attempts(self) -> Optional[pulumi.Input[int]]: \"\"\" The number of times a", "test_specification: How to run the test. :param pulumi.Input['ClientInfoArgs'] client_info: Information", "not None: pulumi.set(__self__, \"flaky_test_attempts\", flaky_test_attempts) if project is not None:", "devices the tests are being executed on. \"\"\" return pulumi.get(self,", "attempts are made if a potential infrastructure issue is detected.", "matrix of tests according to the given specifications. Unsupported environments", "\"project\") @property @pulumi.getter(name=\"resultStorage\") def result_storage(self) -> pulumi.Output['outputs.ResultStorageResponse']: \"\"\" Where the", "May return any of the following canonical error codes: -", "about the client which invoked the test. \"\"\" return pulumi.get(self,", "\"test_executions\") @property @pulumi.getter(name=\"testMatrixId\") def test_matrix_id(self) -> pulumi.Output[str]: \"\"\" Unique id", "is None and not opts.urn: raise TypeError(\"Missing required property 'environment_matrix'\")", "generated by the Pulumi SDK Generator. 
*** # *** Do", "def flaky_test_attempts(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, \"flaky_test_attempts\", value) @property @pulumi.getter def", "return any of the following canonical error codes: - PERMISSION_DENIED", "pulumi.Input['ResultStorageArgs']): pulumi.set(self, \"result_storage\", value) @property @pulumi.getter(name=\"testSpecification\") def test_specification(self) -> pulumi.Input['TestSpecificationArgs']:", "is 10. Default is 0, which implies no reruns. \"\"\"", "environments will be returned in the state UNSUPPORTED. A test", "limited to use at most 2000 devices in parallel. May", "test. Only set when the test matrix state is FINISHED.", "request is malformed or if the matrix tries to use", "__self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] =", "Optional[pulumi.Input[str]]: return pulumi.get(self, \"request_id\") @request_id.setter def request_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self,", "that owns the test matrix. \"\"\" pulumi.set(__self__, \"environment_matrix\", environment_matrix) pulumi.set(__self__,", "= None return TestMatrix(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name=\"clientInfo\") def client_info(self)", "environment_matrix: Optional[pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']]] = None, fail_fast: Optional[pulumi.Input[bool]] = None, flaky_test_attempts: Optional[pulumi.Input[int]]", "pulumi.Input['TestSpecificationArgs']: \"\"\" How to run the test. \"\"\" return pulumi.get(self,", "None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected", "the resource. 
:param TestMatrixArgs args: The arguments to use to", "'environment_matrix'\") __props__.__dict__[\"environment_matrix\"] = environment_matrix __props__.__dict__[\"fail_fast\"] = fail_fast __props__.__dict__[\"flaky_test_attempts\"] = flaky_test_attempts", "test matrix. \"\"\" return pulumi.get(self, \"project\") @property @pulumi.getter(name=\"resultStorage\") def result_storage(self)", "When deleted, the resource will persist on Google Cloud even", "use at most 2000 devices in parallel. May return any", "Generator. *** # *** Do not edit by hand unless", "Unsupported environments will be returned in the state UNSUPPORTED. A", "pulumi.get(self, \"result_storage\") @result_storage.setter def result_storage(self, value: pulumi.Input['ResultStorageArgs']): pulumi.set(self, \"result_storage\", value)", ":param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[pulumi.InputType['ClientInfoArgs']] client_info:", "\"\"\" Output Only. The overall outcome of the test. Only", "@property @pulumi.getter(name=\"requestId\") def request_id(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, \"request_id\") @request_id.setter", "Where the results for the matrix are written. :param pulumi.Input['TestSpecificationArgs']", "This feature is for latency sensitive workloads. The incidence of", "\"test_specification\") @property @pulumi.getter def timestamp(self) -> pulumi.Output[str]: \"\"\" The time", "__props__ = TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] = client_info if environment_matrix is None", "are doing! *** import warnings import pulumi import pulumi.runtime from", "= None, project: Optional[pulumi.Input[str]] = None, request_id: Optional[pulumi.Input[str]] = None,", "test. 
:param pulumi.Input[bool] fail_fast: If true, only a single attempt", "pulumi.get(self, \"client_info\") @client_info.setter def client_info(self, value: Optional[pulumi.Input['ClientInfoArgs']]): pulumi.set(self, \"client_info\", value)", "FINISHED. \"\"\" return pulumi.get(self, \"outcome_summary\") @property @pulumi.getter def project(self) ->", "ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options", "specifications. Unsupported environments will be returned in the state UNSUPPORTED.", "**kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, client_info:", "tests are being executed on. \"\"\" return pulumi.get(self, \"environment_matrix\") @environment_matrix.setter", "any of the following canonical error codes: - PERMISSION_DENIED -", "-> Optional[pulumi.Input[str]]: return pulumi.get(self, \"request_id\") @request_id.setter def request_id(self, value: Optional[pulumi.Input[str]]):", "if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name,", "__props__.__dict__[\"project\"] = None __props__.__dict__[\"result_storage\"] = None __props__.__dict__[\"state\"] = None __props__.__dict__[\"test_executions\"]", "\"\"\" return pulumi.get(self, \"outcome_summary\") @property @pulumi.getter def project(self) -> pulumi.Output[str]:", "to project - INVALID_ARGUMENT - if the request is malformed", "result_storage) pulumi.set(__self__, \"test_specification\", test_specification) if client_info is not None: pulumi.set(__self__,", "considered invalid. Only useful for matrices in the INVALID state.", "None, flaky_test_attempts: Optional[pulumi.Input[int]] = None, project: Optional[pulumi.Input[str]] = None, request_id:", "__self__).__init__( 'google-native:testing/v1:TestMatrix', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id:", "use to populate this resource's properties. 
:param pulumi.ResourceOptions opts: Options", "is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise", "None __props__.__dict__[\"state\"] = None __props__.__dict__[\"test_executions\"] = None __props__.__dict__[\"test_matrix_id\"] = None", "# *** Do not edit by hand unless you're certain", "TestMatrix(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None,", "from ._enums import * from ._inputs import * __all__ =", "simultaneous devices. Auto-naming is currently not supported for this resource.", "of the resulting resource. :param pulumi.Input[str] id: The unique provider", "__props__.__dict__[\"result_storage\"] = None __props__.__dict__[\"state\"] = None __props__.__dict__[\"test_executions\"] = None __props__.__dict__[\"test_matrix_id\"]", "pulumi.Input['ResultStorageArgs']: \"\"\" Where the results for the matrix are written.", "SDK Generator. *** # *** Do not edit by hand", "one or more of its test cases fail for any", "def __init__(__self__, resource_name: str, args: TestMatrixArgs, opts: Optional[pulumi.ResourceOptions] = None):", "the client which invoked the test. :param pulumi.Input[bool] fail_fast: If", "flaky_test_attempts(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, \"flaky_test_attempts\", value) @property @pulumi.getter def project(self)", "= None, result_storage: Optional[pulumi.Input[pulumi.InputType['ResultStorageArgs']]] = None, test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None,", "value: pulumi.Input['ResultStorageArgs']): pulumi.set(self, \"result_storage\", value) @property @pulumi.getter(name=\"testSpecification\") def test_specification(self) ->", "and support is more limited because of that expectation. :param", "run the test. \"\"\" return pulumi.get(self, \"test_specification\") @property @pulumi.getter def", "maximum number of reruns allowed is 10. 
Default is 0,", "None, environment_matrix: Optional[pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']]] = None, fail_fast: Optional[pulumi.Input[bool]] = None, flaky_test_attempts:", "state. \"\"\" return pulumi.get(self, \"invalid_matrix_details\") @property @pulumi.getter(name=\"outcomeSummary\") def outcome_summary(self) ->", "opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] = None", "opts.id to get an existing resource') __props__ = TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"]", "by the Pulumi SDK Generator. *** # *** Do not", "return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"testExecutions\") def test_executions(self) -> pulumi.Output[Sequence['outputs.TestExecutionResponse']]: \"\"\"", "resource. \"\"\" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"]", "\"request_id\", request_id) @property @pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self) -> pulumi.Input['EnvironmentMatrixArgs']: \"\"\" The", "a valid opts.id to get an existing resource') __props__ =", "pulumi.set(__self__, \"request_id\", request_id) @property @pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self) -> pulumi.Input['EnvironmentMatrixArgs']: \"\"\"", "return pulumi.get(self, \"invalid_matrix_details\") @property @pulumi.getter(name=\"outcomeSummary\") def outcome_summary(self) -> pulumi.Output[str]: \"\"\"", "pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts,", "pulumi.set(self, \"project\", value) @property @pulumi.getter(name=\"requestId\") def request_id(self) -> Optional[pulumi.Input[str]]: return", "timestamp(self) -> pulumi.Output[str]: \"\"\" The time this test matrix was", "None): \"\"\" Creates and runs a matrix 
of tests according", "TypeError(\"Missing required property 'environment_matrix'\") __props__.__dict__[\"environment_matrix\"] = environment_matrix __props__.__dict__[\"fail_fast\"] = fail_fast", "result_storage: Where the results for the matrix are written. :param", "__props__.__dict__[\"fail_fast\"] = fail_fast __props__.__dict__[\"flaky_test_attempts\"] = flaky_test_attempts __props__.__dict__[\"project\"] = project __props__.__dict__[\"request_id\"]", "\"request_id\", value) class TestMatrix(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts:", "get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None) -> 'TestMatrix':", "test attempts are not affected. Normally, 2 or more attempts", "'google-native:testing/v1:TestMatrix', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str],", "ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if", "when passed in combination with a valid opts.id to get", "def test_specification(self) -> pulumi.Output['outputs.TestSpecificationResponse']: \"\"\" How to run the test.", "is not None: pulumi.set(__self__, \"request_id\", request_id) @property @pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self)", "= None __props__.__dict__[\"result_storage\"] = None __props__.__dict__[\"state\"] = None __props__.__dict__[\"test_executions\"] =", "def test_matrix_id(self) -> pulumi.Output[str]: \"\"\" Unique id set by the", "Information about the client which invoked the test. :param pulumi.Input[bool]", "test. 
\"\"\" return pulumi.get(self, \"client_info\") @client_info.setter def client_info(self, value: Optional[pulumi.Input['ClientInfoArgs']]):", "None __props__.__dict__[\"timestamp\"] = None super(TestMatrix, __self__).__init__( 'google-native:testing/v1:TestMatrix', resource_name, __props__, opts)", "@pulumi.input_type class TestMatrixArgs: def __init__(__self__, *, environment_matrix: pulumi.Input['EnvironmentMatrixArgs'], result_storage: pulumi.Input['ResultStorageArgs'],", "are being executed on. :param pulumi.Input[bool] fail_fast: If true, only", "pulumi.Output[bool]: \"\"\" If true, only a single attempt at most", "\"\"\" ... def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts", "resource's state with the given name, id, and optional extra", "The unique provider ID of the resource to lookup. :param", "most will be made to run each execution/shard in the", "value: pulumi.Input['TestSpecificationArgs']): pulumi.set(self, \"test_specification\", value) @property @pulumi.getter(name=\"clientInfo\") def client_info(self) ->", "to run the test. \"\"\" return pulumi.get(self, \"test_specification\") @property @pulumi.getter", "__props__.__dict__[\"test_executions\"] = None __props__.__dict__[\"test_matrix_id\"] = None __props__.__dict__[\"test_specification\"] = None __props__.__dict__[\"timestamp\"]", "matrix are written. :param pulumi.Input[pulumi.InputType['TestSpecificationArgs']] test_specification: How to run the", "pulumi.ResourceOptions(id=id)) __props__ = TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] = None __props__.__dict__[\"environment_matrix\"] = None", "def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None) ->", "state(self) -> pulumi.Output[str]: \"\"\" Indicates the current progress of the", "user is not authorized to write to project - INVALID_ARGUMENT", "matrix state is FINISHED. 
\"\"\" return pulumi.get(self, \"outcome_summary\") @property @pulumi.getter", "\"\"\" return pulumi.get(self, \"environment_matrix\") @property @pulumi.getter(name=\"failFast\") def fail_fast(self) -> pulumi.Output[bool]:", "an existing TestMatrix resource's state with the given name, id,", "Optional[pulumi.Input[str]] = None): \"\"\" The set of arguments for constructing", "def client_info(self, value: Optional[pulumi.Input['ClientInfoArgs']]): pulumi.set(self, \"client_info\", value) @property @pulumi.getter(name=\"failFast\") def", "id, and optional extra properties used to qualify the lookup.", "args: TestMatrixArgs, opts: Optional[pulumi.ResourceOptions] = None): \"\"\" Creates and runs", "Where the results for the matrix are written. \"\"\" return", "which implies no reruns. \"\"\" return pulumi.get(self, \"flaky_test_attempts\") @flaky_test_attempts.setter def", "because of that expectation. \"\"\" return pulumi.get(self, \"fail_fast\") @property @pulumi.getter(name=\"flakyTestAttempts\")", "unless you're certain you know what you are doing! ***", "why the matrix is considered invalid. Only useful for matrices", "pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to", "None __props__.__dict__[\"test_specification\"] = None __props__.__dict__[\"timestamp\"] = None return TestMatrix(resource_name, opts=opts,", "fail_fast(self) -> pulumi.Output[bool]: \"\"\" If true, only a single attempt", "a potential infrastructure issue is detected. This feature is for", "pulumi.Input['EnvironmentMatrixArgs']): pulumi.set(self, \"environment_matrix\", value) @property @pulumi.getter(name=\"resultStorage\") def result_storage(self) -> pulumi.Input['ResultStorageArgs']:", "result_storage(self) -> pulumi.Input['ResultStorageArgs']: \"\"\" Where the results for the matrix", "given specifications. 
Unsupported environments will be returned in the state", ":param TestMatrixArgs args: The arguments to use to populate this", "pulumi.Output[Sequence['outputs.TestExecutionResponse']]: \"\"\" The list of test executions that the service", "if opts.id is None: if __props__ is not None: raise", "request_id: Optional[pulumi.Input[str]] = None): \"\"\" The set of arguments for", "owns the test matrix. :param pulumi.Input[pulumi.InputType['ResultStorageArgs']] result_storage: Where the results", "no reruns. \"\"\" return pulumi.get(self, \"flaky_test_attempts\") @property @pulumi.getter(name=\"invalidMatrixDetails\") def invalid_matrix_details(self)", "= TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] = None __props__.__dict__[\"environment_matrix\"] = None __props__.__dict__[\"fail_fast\"] =", "to lookup. :param pulumi.ResourceOptions opts: Options for the resource. \"\"\"", "... import _utilities from . import outputs from ._enums import", "are written. :param pulumi.Input[pulumi.InputType['TestSpecificationArgs']] test_specification: How to run the test.", "__props__.__dict__[\"fail_fast\"] = None __props__.__dict__[\"flaky_test_attempts\"] = None __props__.__dict__[\"invalid_matrix_details\"] = None __props__.__dict__[\"outcome_summary\"]", "import * from ._inputs import * __all__ = ['TestMatrixArgs', 'TestMatrix']", "outcome_summary(self) -> pulumi.Output[str]: \"\"\" Output Only. The overall outcome of", "pulumi.Output[str]: \"\"\" Unique id set by the service. \"\"\" return", "of reruns allowed is 10. Default is 0, which implies", "project that owns the test matrix. \"\"\" pulumi.set(__self__, \"environment_matrix\", environment_matrix)", "# coding=utf-8 # *** WARNING: this file was generated by", "import warnings import pulumi import pulumi.runtime from typing import Any,", "client which invoked the test. 
:param pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']] environment_matrix: The devices", "pulumi.get(self, \"outcome_summary\") @property @pulumi.getter def project(self) -> pulumi.Output[str]: \"\"\" The", "= None, flaky_test_attempts: Optional[pulumi.Input[int]] = None, project: Optional[pulumi.Input[str]] = None,", "which invoked the test. :param pulumi.Input[pulumi.InputType['EnvironmentMatrixArgs']] environment_matrix: The devices the", "is 0, which implies no reruns. \"\"\" return pulumi.get(self, \"flaky_test_attempts\")", "= None __props__.__dict__[\"test_matrix_id\"] = None __props__.__dict__[\"timestamp\"] = None super(TestMatrix, __self__).__init__(", "-> pulumi.Output[bool]: \"\"\" If true, only a single attempt at", "provider ID of the resource to lookup. :param pulumi.ResourceOptions opts:", "@pulumi.getter def timestamp(self) -> pulumi.Output[str]: \"\"\" The time this test", "return pulumi.get(self, \"test_executions\") @property @pulumi.getter(name=\"testMatrixId\") def test_matrix_id(self) -> pulumi.Output[str]: \"\"\"", "is None and not opts.urn: raise TypeError(\"Missing required property 'test_specification'\")", "matrix are written. 
\"\"\" return pulumi.get(self, \"result_storage\") @result_storage.setter def result_storage(self,", "= None __props__.__dict__[\"timestamp\"] = None return TestMatrix(resource_name, opts=opts, __props__=__props__) @property", "*args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(TestMatrixArgs, pulumi.ResourceOptions, *args, **kwargs) if", "is not None: raise TypeError('__props__ is only valid when passed", "pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance')", "pulumi.set(__self__, \"flaky_test_attempts\", flaky_test_attempts) if project is not None: pulumi.set(__self__, \"project\",", "project: Optional[pulumi.Input[str]] = None, request_id: Optional[pulumi.Input[str]] = None): \"\"\" The", "if one or more of its test cases fail for", "Optional[pulumi.Input[str]] = None, result_storage: Optional[pulumi.Input[pulumi.InputType['ResultStorageArgs']]] = None, test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] =", "*** # *** Do not edit by hand unless you're", "Optional[pulumi.Input['ClientInfoArgs']]: \"\"\" Information about the client which invoked the test.", "for the resource. \"\"\" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ =", "-> Optional[pulumi.Input['ClientInfoArgs']]: \"\"\" Information about the client which invoked the", "is None and not opts.urn: raise TypeError(\"Missing required property 'result_storage'\")", "10. Default is 0, which implies no reruns. \"\"\" return", "None: pulumi.set(__self__, \"client_info\", client_info) if fail_fast is not None: pulumi.set(__self__,", "this resource's API doesn't support deletion. 
When deleted, the resource", "= None __props__.__dict__[\"state\"] = None __props__.__dict__[\"test_executions\"] = None __props__.__dict__[\"test_matrix_id\"] =", "invalid_matrix_details(self) -> pulumi.Output[str]: \"\"\" Describes why the matrix is considered", "for latency sensitive workloads. The incidence of execution failures may", "with a valid opts.id to get an existing resource') __props__", "\"environment_matrix\", value) @property @pulumi.getter(name=\"resultStorage\") def result_storage(self) -> pulumi.Input['ResultStorageArgs']: \"\"\" Where", "@pulumi.getter(name=\"clientInfo\") def client_info(self) -> Optional[pulumi.Input['ClientInfoArgs']]: \"\"\" Information about the client", "else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions]", "\"\"\" ... @overload def __init__(__self__, resource_name: str, args: TestMatrixArgs, opts:", "Optional[pulumi.ResourceOptions] = None) -> 'TestMatrix': \"\"\" Get an existing TestMatrix", "not opts.urn: raise TypeError(\"Missing required property 'test_specification'\") __props__.__dict__[\"test_specification\"] = test_specification", "\"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"testExecutions\") def test_executions(self) -> pulumi.Output[Sequence['outputs.TestExecutionResponse']]:", "None __props__.__dict__[\"project\"] = None __props__.__dict__[\"result_storage\"] = None __props__.__dict__[\"state\"] = None", "request_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"request_id\", value) class TestMatrix(pulumi.CustomResource): @overload def", "@pulumi.getter(name=\"failFast\") def fail_fast(self) -> pulumi.Output[bool]: \"\"\" If true, only a", "are written. 
\"\"\" return pulumi.get(self, \"result_storage\") @property @pulumi.getter def state(self)", "flaky_test_attempts: The number of times a TestExecution should be re-attempted", "= _utilities.get_resource_args_opts(TestMatrixArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None:", "to run the test. \"\"\" ... @overload def __init__(__self__, resource_name:", "\"test_specification\", test_specification) if client_info is not None: pulumi.set(__self__, \"client_info\", client_info)", "project(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"project\", value) @property @pulumi.getter(name=\"requestId\") def request_id(self)", "Cloud even though it will be deleted from Pulumi state.", "an existing resource') __props__ = TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] = client_info if", "@property @pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self) -> pulumi.Input['EnvironmentMatrixArgs']: \"\"\" The devices the", "feature is for latency sensitive workloads. The incidence of execution", "None, __props__=None): \"\"\" Creates and runs a matrix of tests", "if the user is not authorized to write to project", "this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource.", "test_specification(self) -> pulumi.Output['outputs.TestSpecificationResponse']: \"\"\" How to run the test. \"\"\"", "support is more limited because of that expectation. :param pulumi.Input[int]", "pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload", "the results for the matrix are written. :param pulumi.Input['TestSpecificationArgs'] test_specification:", "['TestMatrixArgs', 'TestMatrix'] @pulumi.input_type class TestMatrixArgs: def __init__(__self__, *, environment_matrix: pulumi.Input['EnvironmentMatrixArgs'],", "is not None: pulumi.set(__self__, \"project\", project) if request_id is not", "for constructing a TestMatrix resource. 
:param pulumi.Input['EnvironmentMatrixArgs'] environment_matrix: The devices", "\"\"\" return pulumi.get(self, \"flaky_test_attempts\") @flaky_test_attempts.setter def flaky_test_attempts(self, value: Optional[pulumi.Input[int]]): pulumi.set(self,", "this matrix. \"\"\" return pulumi.get(self, \"test_executions\") @property @pulumi.getter(name=\"testMatrixId\") def test_matrix_id(self)", "\"fail_fast\", value) @property @pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self) -> Optional[pulumi.Input[int]]: \"\"\" The", "@project.setter def project(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"project\", value) @property @pulumi.getter(name=\"requestId\")", "value: Optional[pulumi.Input['ClientInfoArgs']]): pulumi.set(self, \"client_info\", value) @property @pulumi.getter(name=\"failFast\") def fail_fast(self) ->", "__props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions]", "are being executed on. \"\"\" return pulumi.get(self, \"environment_matrix\") @property @pulumi.getter(name=\"failFast\")", "-> pulumi.Output[str]: \"\"\" Unique id set by the service. \"\"\"", "request_id is not None: pulumi.set(__self__, \"request_id\", request_id) @property @pulumi.getter(name=\"environmentMatrix\") def", "The incidence of execution failures may be significantly greater for", "though it will be deleted from Pulumi state. :param str", "client which invoked the test. \"\"\" return pulumi.get(self, \"client_info\") @client_info.setter", "the tests are being executed on. 
\"\"\" return pulumi.get(self, \"environment_matrix\")", "- PERMISSION_DENIED - if the user is not authorized to", "from typing import Any, Mapping, Optional, Sequence, Union, overload from", "def __init__(__self__, *, environment_matrix: pulumi.Input['EnvironmentMatrixArgs'], result_storage: pulumi.Input['ResultStorageArgs'], test_specification: pulumi.Input['TestSpecificationArgs'], client_info:", "TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] = None __props__.__dict__[\"environment_matrix\"] = None __props__.__dict__[\"fail_fast\"] = None", "@pulumi.getter(name=\"testMatrixId\") def test_matrix_id(self) -> pulumi.Output[str]: \"\"\" Unique id set by", "Normally, 2 or more attempts are made if a potential", "@property @pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self) -> Optional[pulumi.Input[int]]: \"\"\" The number of", "overall outcome of the test. Only set when the test", "WARNING: this file was generated by the Pulumi SDK Generator.", "opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource", "import Any, Mapping, Optional, Sequence, Union, overload from ... import", "devices in parallel. May return any of the following canonical", "for matrices in the INVALID state. \"\"\" return pulumi.get(self, \"invalid_matrix_details\")", "certain you know what you are doing! 
*** import warnings", "class TestMatrixArgs: def __init__(__self__, *, environment_matrix: pulumi.Input['EnvironmentMatrixArgs'], result_storage: pulumi.Input['ResultStorageArgs'], test_specification:", "@test_specification.setter def test_specification(self, value: pulumi.Input['TestSpecificationArgs']): pulumi.set(self, \"test_specification\", value) @property @pulumi.getter(name=\"clientInfo\")", "\"\"\" Creates and runs a matrix of tests according to", "__props__.__dict__[\"invalid_matrix_details\"] = None __props__.__dict__[\"outcome_summary\"] = None __props__.__dict__[\"state\"] = None __props__.__dict__[\"test_executions\"]", "-> Optional[pulumi.Input[int]]: \"\"\" The number of times a TestExecution should", "-> pulumi.Output[str]: \"\"\" Indicates the current progress of the test", "None return TestMatrix(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name=\"clientInfo\") def client_info(self) ->", "this resource. Note - this resource's API doesn't support deletion.", "@pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self) -> pulumi.Output['outputs.EnvironmentMatrixResponse']: \"\"\" The devices the tests", "progress of the test matrix. \"\"\" return pulumi.get(self, \"state\") @property", "executed on. :param pulumi.Input['ResultStorageArgs'] result_storage: Where the results for the", "the resource. \"\"\" ... def __init__(__self__, resource_name: str, *args, **kwargs):", "\"\"\" pulumi.set(__self__, \"environment_matrix\", environment_matrix) pulumi.set(__self__, \"result_storage\", result_storage) pulumi.set(__self__, \"test_specification\", test_specification)", "at most will be made to run each execution/shard in", "project(self) -> pulumi.Output[str]: \"\"\" The cloud project that owns the", "run the test. \"\"\" ... 
@overload def __init__(__self__, resource_name: str,", "# *** WARNING: this file was generated by the Pulumi", "TypeError('__props__ is only valid when passed in combination with a", "of the test matrix. \"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"testExecutions\")", "was generated by the Pulumi SDK Generator. *** # ***", "the matrix are written. \"\"\" return pulumi.get(self, \"result_storage\") @result_storage.setter def", "on. :param pulumi.Input['ResultStorageArgs'] result_storage: Where the results for the matrix", "limited because of that expectation. :param pulumi.Input[int] flaky_test_attempts: The number", "not None: raise TypeError('__props__ is only valid when passed in", "pulumi.Output[str]: \"\"\" Describes why the matrix is considered invalid. Only", "Optional[pulumi.Input[str]] = None, request_id: Optional[pulumi.Input[str]] = None, result_storage: Optional[pulumi.Input[pulumi.InputType['ResultStorageArgs']]] =", "Optional[pulumi.Input['ClientInfoArgs']]): pulumi.set(self, \"client_info\", value) @property @pulumi.getter(name=\"failFast\") def fail_fast(self) -> Optional[pulumi.Input[bool]]:", "the test matrix. \"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"testExecutions\") def", "= None __props__.__dict__[\"timestamp\"] = None super(TestMatrix, __self__).__init__( 'google-native:testing/v1:TestMatrix', resource_name, __props__,", "__props__.__dict__[\"state\"] = None __props__.__dict__[\"test_executions\"] = None __props__.__dict__[\"test_matrix_id\"] = None __props__.__dict__[\"test_specification\"]", "\"project\", value) @property @pulumi.getter(name=\"requestId\") def request_id(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self,", "The arguments to use to populate this resource's properties. 
:param", "import * __all__ = ['TestMatrixArgs', 'TestMatrix'] @pulumi.input_type class TestMatrixArgs: def", "required property 'environment_matrix'\") __props__.__dict__[\"environment_matrix\"] = environment_matrix __props__.__dict__[\"fail_fast\"] = fail_fast __props__.__dict__[\"flaky_test_attempts\"]", "pulumi.Input[str] id: The unique provider ID of the resource to", "not None: pulumi.set(__self__, \"request_id\", request_id) @property @pulumi.getter(name=\"environmentMatrix\") def environment_matrix(self) ->", "The cloud project that owns the test matrix. :param pulumi.Input[pulumi.InputType['ResultStorageArgs']]", "pulumi.set(self, \"result_storage\", value) @property @pulumi.getter(name=\"testSpecification\") def test_specification(self) -> pulumi.Input['TestSpecificationArgs']: \"\"\"", "pulumi.Input[int] flaky_test_attempts: The number of times a TestExecution should be", "project) if request_id is not None: pulumi.set(__self__, \"request_id\", request_id) @property", "Optional[pulumi.Input[str]]): pulumi.set(self, \"request_id\", value) class TestMatrix(pulumi.CustomResource): @overload def __init__(__self__, resource_name:", "if the request is malformed or if the matrix tries", "that owns the test matrix. :param pulumi.Input[pulumi.InputType['ResultStorageArgs']] result_storage: Where the", "matrices and support is more limited because of that expectation.", "if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts,", "opts=opts, __props__=__props__) @property @pulumi.getter(name=\"clientInfo\") def client_info(self) -> pulumi.Output['outputs.ClientInfoResponse']: \"\"\" Information", "project is not None: pulumi.set(__self__, \"project\", project) if request_id is", "Default is 0, which implies no reruns. 
\"\"\" return pulumi.get(self,", "if the matrix tries to use too many simultaneous devices.", "__props__.__dict__[\"flaky_test_attempts\"] = flaky_test_attempts __props__.__dict__[\"project\"] = project __props__.__dict__[\"request_id\"] = request_id if", "resource_args, opts = _utilities.get_resource_args_opts(TestMatrixArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is", "test. \"\"\" return pulumi.get(self, \"test_specification\") @property @pulumi.getter def timestamp(self) ->", "unique name of the resulting resource. :param pulumi.Input[str] id: The", "@property @pulumi.getter(name=\"resultStorage\") def result_storage(self) -> pulumi.Output['outputs.ResultStorageResponse']: \"\"\" Where the results", "value: Optional[pulumi.Input[str]]): pulumi.set(self, \"request_id\", value) class TestMatrix(pulumi.CustomResource): @overload def __init__(__self__,", "= flaky_test_attempts __props__.__dict__[\"project\"] = project __props__.__dict__[\"request_id\"] = request_id if result_storage", "value: pulumi.Input['EnvironmentMatrixArgs']): pulumi.set(self, \"environment_matrix\", value) @property @pulumi.getter(name=\"resultStorage\") def result_storage(self) ->", "@property @pulumi.getter(name=\"failFast\") def fail_fast(self) -> Optional[pulumi.Input[bool]]: \"\"\" If true, only", "if flaky_test_attempts is not None: pulumi.set(__self__, \"flaky_test_attempts\", flaky_test_attempts) if project", "@pulumi.getter(name=\"flakyTestAttempts\") def flaky_test_attempts(self) -> pulumi.Output[int]: \"\"\" The number of times", "pulumi.get(self, \"test_specification\") @test_specification.setter def test_specification(self, value: pulumi.Input['TestSpecificationArgs']): pulumi.set(self, \"test_specification\", value)", "= None __props__.__dict__[\"fail_fast\"] = None __props__.__dict__[\"flaky_test_attempts\"] = None __props__.__dict__[\"invalid_matrix_details\"] =", "@property @pulumi.getter def project(self) -> pulumi.Output[str]: \"\"\" The cloud project", 
"in the INVALID state. \"\"\" return pulumi.get(self, \"invalid_matrix_details\") @property @pulumi.getter(name=\"outcomeSummary\")", "reruns allowed is 10. Default is 0, which implies no", "@property @pulumi.getter(name=\"testMatrixId\") def test_matrix_id(self) -> pulumi.Output[str]: \"\"\" Unique id set", "being executed on. :param pulumi.Input['ResultStorageArgs'] result_storage: Where the results for", "value) @property @pulumi.getter(name=\"clientInfo\") def client_info(self) -> Optional[pulumi.Input['ClientInfoArgs']]: \"\"\" Information about", "lookup. :param pulumi.ResourceOptions opts: Options for the resource. \"\"\" opts", "pulumi.get(self, \"fail_fast\") @fail_fast.setter def fail_fast(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, \"fail_fast\", value)", "-> pulumi.Output['outputs.TestSpecificationResponse']: \"\"\" How to run the test. \"\"\" return", "for the matrix are written. :param pulumi.Input['TestSpecificationArgs'] test_specification: How to", "issue is detected. This feature is for latency sensitive workloads.", "Sequence, Union, overload from ... import _utilities from . import", "def client_info(self) -> pulumi.Output['outputs.ClientInfoResponse']: \"\"\" Information about the client which", "\"\"\" return pulumi.get(self, \"test_specification\") @property @pulumi.getter def timestamp(self) -> pulumi.Output[str]:", "number of times a TestExecution should be re-attempted if one", "be made to run each execution/shard in the matrix. Flaky", "resource_name: The name of the resource. :param TestMatrixArgs args: The", "of tests according to the given specifications. Unsupported environments will", "invoked the test. \"\"\" return pulumi.get(self, \"client_info\") @client_info.setter def client_info(self,", "matrix is considered invalid. Only useful for matrices in the", "the results for the matrix are written. 
\"\"\" return pulumi.get(self,", "def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, client_info: Optional[pulumi.Input[pulumi.InputType['ClientInfoArgs']]]", "matrix. \"\"\" return pulumi.get(self, \"test_executions\") @property @pulumi.getter(name=\"testMatrixId\") def test_matrix_id(self) ->", "id: The unique provider ID of the resource to lookup.", "useful for matrices in the INVALID state. \"\"\" return pulumi.get(self,", "written. \"\"\" return pulumi.get(self, \"result_storage\") @result_storage.setter def result_storage(self, value: pulumi.Input['ResultStorageArgs']):", "is None: opts.version = _utilities.get_version() if opts.id is None: if", "return pulumi.get(self, \"outcome_summary\") @property @pulumi.getter def project(self) -> pulumi.Output[str]: \"\"\"", "Options for the resource. \"\"\" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__", "environment_matrix: The devices the tests are being executed on. :param", "__props__ is not None: raise TypeError('__props__ is only valid when", "= None __props__.__dict__[\"environment_matrix\"] = None __props__.__dict__[\"fail_fast\"] = None __props__.__dict__[\"flaky_test_attempts\"] =", "Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None, __props__=None): if opts is None: opts =", "value) @property @pulumi.getter(name=\"testSpecification\") def test_specification(self) -> pulumi.Input['TestSpecificationArgs']: \"\"\" How to", "state is FINISHED. \"\"\" return pulumi.get(self, \"outcome_summary\") @property @pulumi.getter def", "workloads. 
The incidence of execution failures may be significantly greater", "def fail_fast(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, \"fail_fast\", value) @property @pulumi.getter(name=\"flakyTestAttempts\") def", "Do not edit by hand unless you're certain you know", "__props__=None): \"\"\" Creates and runs a matrix of tests according", "that the service creates for this matrix. \"\"\" return pulumi.get(self,", "fail_fast __props__.__dict__[\"flaky_test_attempts\"] = flaky_test_attempts __props__.__dict__[\"project\"] = project __props__.__dict__[\"request_id\"] = request_id", "for this matrix. \"\"\" return pulumi.get(self, \"test_executions\") @property @pulumi.getter(name=\"testMatrixId\") def", "-> pulumi.Output[int]: \"\"\" The number of times a TestExecution should", "invoked the test. :param pulumi.Input[bool] fail_fast: If true, only a", "\"state\") @property @pulumi.getter(name=\"testExecutions\") def test_executions(self) -> pulumi.Output[Sequence['outputs.TestExecutionResponse']]: \"\"\" The list", "pulumi.Input[str] project: The cloud project that owns the test matrix.", "not supported for this resource. Note - this resource's API", "None, test_specification: Optional[pulumi.Input[pulumi.InputType['TestSpecificationArgs']]] = None, __props__=None): \"\"\" Creates and runs", "None __props__.__dict__[\"invalid_matrix_details\"] = None __props__.__dict__[\"outcome_summary\"] = None __props__.__dict__[\"project\"] = None", "UNSUPPORTED. A test matrix is limited to use at most", "isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions", "to the given specifications. Unsupported environments will be returned in", "\"\"\" Where the results for the matrix are written. \"\"\"", "are written. 
:param pulumi.Input['TestSpecificationArgs'] test_specification: How to run the test.", "return pulumi.get(self, \"flaky_test_attempts\") @property @pulumi.getter(name=\"invalidMatrixDetails\") def invalid_matrix_details(self) -> pulumi.Output[str]: \"\"\"", "will persist on Google Cloud even though it will be", "canonical error codes: - PERMISSION_DENIED - if the user is", "resource') __props__ = TestMatrixArgs.__new__(TestMatrixArgs) __props__.__dict__[\"client_info\"] = client_info if environment_matrix is", "\"\"\" return pulumi.get(self, \"invalid_matrix_details\") @property @pulumi.getter(name=\"outcomeSummary\") def outcome_summary(self) -> pulumi.Output[str]:", "\"\"\" Unique id set by the service. \"\"\" return pulumi.get(self,", "is only valid when passed in combination with a valid", "pulumi.set(__self__, \"result_storage\", result_storage) pulumi.set(__self__, \"test_specification\", test_specification) if client_info is not", "the matrix is considered invalid. Only useful for matrices in", "valid opts.id to get an existing resource') __props__ = TestMatrixArgs.__new__(TestMatrixArgs)", "-> pulumi.Output[str]: \"\"\" The cloud project that owns the test", "None __props__.__dict__[\"flaky_test_attempts\"] = None __props__.__dict__[\"invalid_matrix_details\"] = None __props__.__dict__[\"outcome_summary\"] = None", "cases fail for any reason. The maximum number of reruns", "pulumi.Input['TestSpecificationArgs'] test_specification: How to run the test. :param pulumi.Input['ClientInfoArgs'] client_info:", "invoked the test. \"\"\" return pulumi.get(self, \"client_info\") @property @pulumi.getter(name=\"environmentMatrix\") def", ":param pulumi.Input[int] flaky_test_attempts: The number of times a TestExecution should", "owns the test matrix. \"\"\" return pulumi.get(self, \"project\") @project.setter def", "fail_fast: If true, only a single attempt at most will", "Only. The overall outcome of the test. 
Only set when", "and not opts.urn: raise TypeError(\"Missing required property 'result_storage'\") __props__.__dict__[\"result_storage\"] =", "only a single attempt at most will be made to", "pulumi.get(self, \"test_executions\") @property @pulumi.getter(name=\"testMatrixId\") def test_matrix_id(self) -> pulumi.Output[str]: \"\"\" Unique", "@property @pulumi.getter(name=\"testSpecification\") def test_specification(self) -> pulumi.Input['TestSpecificationArgs']: \"\"\" How to run", "__props__.__dict__[\"test_executions\"] = None __props__.__dict__[\"test_matrix_id\"] = None __props__.__dict__[\"timestamp\"] = None super(TestMatrix,", "@pulumi.getter(name=\"resultStorage\") def result_storage(self) -> pulumi.Output['outputs.ResultStorageResponse']: \"\"\" Where the results for", "value) @property @pulumi.getter(name=\"resultStorage\") def result_storage(self) -> pulumi.Input['ResultStorageArgs']: \"\"\" Where the", "Only useful for matrices in the INVALID state. \"\"\" return", "\"\"\" How to run the test. \"\"\" return pulumi.get(self, \"test_specification\")", "Only set when the test matrix state is FINISHED. \"\"\"", "not None: pulumi.set(__self__, \"client_info\", client_info) if fail_fast is not None:" ]
[ "value_entry = tk.Entry(self.value_config_frame, width=10) value_entry.insert(tk.END, val_config.value) value_entry.config(state = tk.DISABLED) value_entry.config(disabledforeground", "bt_enable : self.toggle_val_config_enable(id, button) bt_enable.grid(row = i + 1, column", "column = 2) self.config_frame.pack(side=tk.TOP, anchor=tk.NW) self.value_config_frame = tk.Frame(self.root) # Config", "lb_enable = tk.Label(self.value_config_frame, width = 10) lb_enable[\"text\"] = \"Enable\" lb_enable.grid(row", "val_config.symbol) symbol_entry.config(state = tk.DISABLED) symbol_entry.config(disabledforeground = \"black\", disabledbackground = \"white\")", "button[\"text\"] = \"ON\" if val_config.enable else \"OFF\" color = \"green\"", "color = \"green\" if config.enable else \"red\" bt_enable.config(bg=color, activebackground =", "detail_entry.insert(tk.END, val_config.detail) detail_entry.config(state = tk.DISABLED) detail_entry.config(disabledforeground = \"black\", disabledbackground =", "def toggle_val_config_enable(self, id, button : tk.Button): val_config = self.control.getValConfigs()[id] val_config.enable", "\"white\") detail_entry.grid(row= i + 1, column = 1) value_entry =", "activebackground = color) def toggle_val_config_enable(self, id, button : tk.Button): val_config", "self.root = tk.Tk() self.root.title(u\"Header File Generator\") self.root.geometry(\"700x800\") self.config_frame = tk.Frame(self.root)", "= tk.Label(self.config_frame, width = 10) lb_enable[\"text\"] = \"Enable\" lb_enable.grid(row =", "1) bt_enable = tk.Button(self.config_frame, text=\"ON\", width= 5) bt_enable[\"text\"] = \"ON\"", "\"black\", disabledbackground = \"white\") symbol_entry.grid(row= i + 1, column =", "= 1) value_entry = tk.Entry(self.value_config_frame, width=10) value_entry.insert(tk.END, val_config.value) value_entry.config(state =", "if val_config.enable else \"red\" bt_enable.config(bg=color, activebackground = color) bt_enable[\"command\"] =", "10) lb_enable[\"text\"] = 
\"Enable\" lb_enable.grid(row = 0, column = 2)", "symbol_entry.insert(tk.END, config.symbol) symbol_entry.config(state = tk.DISABLED) symbol_entry.config(disabledforeground = \"black\", disabledbackground =", "= 10) lb_value[\"text\"] = \"Value\" lb_value.grid(row = 0, column =", "detail_entry.config(disabledforeground = \"black\", disabledbackground = \"white\") detail_entry.grid(row= i + 1,", "Info\", \"Generated:{0}\".format(self.control.header_config.path)) def update(self): pass def toggle_config_enable(self, id, button :", "import Control class View: def __init__(self, control : Control.Control): self.control", "= \"ON\" if val_config.enable else \"OFF\" color = \"green\" if", "color) def toggle_val_config_enable(self, id, button : tk.Button): val_config = self.control.getValConfigs()[id]", "10) lb_enable[\"text\"] = \"Enable\" lb_enable.grid(row = 0, column = 3)", "lambda id = i, button = bt_enable : self.toggle_config_enable(id, button)", "\"red\" bt_enable.config(bg=color, activebackground = color) bt_enable[\"command\"] = lambda id =", "+ 1, column = 0) detail_entry = tk.Entry(self.config_frame, width=40) detail_entry.insert(tk.END,", "self.bt_generate[\"text\"] = \"Generate Header\" self.bt_generate[\"command\"] = self.generateHeader self.bt_generate.pack(side=tk.BOTTOM, anchor=tk.SE) def", "def update(self): pass def toggle_config_enable(self, id, button : tk.Button): config", "= \"white\") value_entry.grid(row= i + 1, column = 2) bt_enable", "detail_entry.grid(row= i + 1, column = 1) value_entry = tk.Entry(self.value_config_frame,", "bt_enable.grid(row = i + 1, column = 3) self.value_config_frame.pack(side=tk.TOP, anchor=tk.W)", "start(self): self.root.mainloop() def generateHeader(self): self.control.generateHeader() tk.messagebox.showinfo(\"Header Generator Info\", \"Generated:{0}\".format(self.control.header_config.path)) def", "= tk.Label(self.value_config_frame, width = 40) lb_description[\"text\"] = \"Detail\" lb_description.grid(row =", "column 
= 1) value_entry = tk.Entry(self.value_config_frame, width=10) value_entry.insert(tk.END, val_config.value) value_entry.config(state", "tk.Entry(self.value_config_frame, width=40) detail_entry.insert(tk.END, val_config.detail) detail_entry.config(state = tk.DISABLED) detail_entry.config(disabledforeground = \"black\",", "= i + 1, column = 2) self.config_frame.pack(side=tk.TOP, anchor=tk.NW) self.value_config_frame", "class View: def __init__(self, control : Control.Control): self.control = control", "else \"red\" button.config(bg=color, activebackground = color) def toggle_val_config_enable(self, id, button", "= control # Init Window self.root = tk.Tk() self.root.title(u\"Header File", "color = \"green\" if config.enable else \"red\" button.config(bg=color, activebackground =", "\"green\" if config.enable else \"red\" bt_enable.config(bg=color, activebackground = color) bt_enable[\"command\"]", "button : tk.Button): config = self.control.getConfigs()[id] config.enable = not config.enable", "i, config in enumerate(self.control.getConfigs()): symbol_entry = tk.Entry(self.config_frame, width=20) symbol_entry.insert(tk.END, config.symbol)", "# Config Table lb_symbol = tk.Label(self.config_frame, width = 20) lb_symbol[\"text\"]", "= 0, column = 1) lb_value = tk.Label(self.value_config_frame, width =", "button = bt_enable : self.toggle_config_enable(id, button) bt_enable.grid(row = i +", "i, button = bt_enable : self.toggle_config_enable(id, button) bt_enable.grid(row = i", "= \"Detail\" lb_description.grid(row = 0, column = 1) lb_enable =", "id = i, button = bt_enable : self.toggle_config_enable(id, button) bt_enable.grid(row", "tk.Entry(self.config_frame, width=40) detail_entry.insert(tk.END, config.detail) detail_entry.config(state = tk.DISABLED) detail_entry.config(disabledforeground = \"black\",", "\"ON\" if val_config.enable else \"OFF\" color = \"green\" if val_config.enable", "3) for i, val_config in enumerate(self.control.getValConfigs()): symbol_entry = 
tk.Entry(self.value_config_frame, width=20)", "= 0) lb_description = tk.Label(self.config_frame, width = 40) lb_description[\"text\"] =", "as tk import tkinter.messagebox from Control import Control class View:", "lb_enable = tk.Label(self.config_frame, width = 10) lb_enable[\"text\"] = \"Enable\" lb_enable.grid(row", "self.control.getValConfigs()[id] val_config.enable = not val_config.enable button[\"text\"] = \"ON\" if val_config.enable", "symbol_entry = tk.Entry(self.value_config_frame, width=20) symbol_entry.insert(tk.END, val_config.symbol) symbol_entry.config(state = tk.DISABLED) symbol_entry.config(disabledforeground", "0) lb_description = tk.Label(self.config_frame, width = 40) lb_description[\"text\"] = \"Detail\"", "= tk.DISABLED) value_entry.config(disabledforeground = \"black\", disabledbackground = \"white\") value_entry.grid(row= i", "self.bt_generate.pack(side=tk.BOTTOM, anchor=tk.SE) def start(self): self.root.mainloop() def generateHeader(self): self.control.generateHeader() tk.messagebox.showinfo(\"Header Generator", "\"Detail\" lb_description.grid(row = 0, column = 1) lb_enable = tk.Label(self.config_frame,", "config.enable else \"OFF\" color = \"green\" if config.enable else \"red\"", "lb_description.grid(row = 0, column = 1) lb_enable = tk.Label(self.config_frame, width", ": tk.Button): val_config = self.control.getValConfigs()[id] val_config.enable = not val_config.enable button[\"text\"]", "2) self.config_frame.pack(side=tk.TOP, anchor=tk.NW) self.value_config_frame = tk.Frame(self.root) # Config Table lb_symbol", "self.control.getConfigs()[id] config.enable = not config.enable button[\"text\"] = \"ON\" if config.enable", "column = 1) lb_value = tk.Label(self.value_config_frame, width = 10) lb_value[\"text\"]", "= 0, column = 0) lb_description = tk.Label(self.value_config_frame, width =", "1, column = 0) detail_entry = tk.Entry(self.value_config_frame, width=40) detail_entry.insert(tk.END, val_config.detail)", "= \"Detail\" 
lb_description.grid(row = 0, column = 1) lb_value =", "tk.Button(self.config_frame, text=\"ON\", width= 5) bt_enable[\"text\"] = \"ON\" if config.enable else", "lb_symbol.grid(row = 0, column = 0) lb_description = tk.Label(self.value_config_frame, width", "self.control = control # Init Window self.root = tk.Tk() self.root.title(u\"Header", "toggle_val_config_enable(self, id, button : tk.Button): val_config = self.control.getValConfigs()[id] val_config.enable =", "= tk.Label(self.config_frame, width = 20) lb_symbol[\"text\"] = \"Symbol\" lb_symbol.grid(row =", "button = bt_enable : self.toggle_val_config_enable(id, button) bt_enable.grid(row = i +", "\"Generated:{0}\".format(self.control.header_config.path)) def update(self): pass def toggle_config_enable(self, id, button : tk.Button):", "not config.enable button[\"text\"] = \"ON\" if config.enable else \"OFF\" color", "val_config.value) value_entry.config(state = tk.DISABLED) value_entry.config(disabledforeground = \"black\", disabledbackground = \"white\")", "= tk.DISABLED) detail_entry.config(disabledforeground = \"black\", disabledbackground = \"white\") detail_entry.grid(row= i", "self.bt_generate = tk.Button(self.root) self.bt_generate[\"text\"] = \"Generate Header\" self.bt_generate[\"command\"] = self.generateHeader", "tk.messagebox.showinfo(\"Header Generator Info\", \"Generated:{0}\".format(self.control.header_config.path)) def update(self): pass def toggle_config_enable(self, id,", "bt_enable[\"text\"] = \"ON\" if config.enable else \"OFF\" color = \"green\"", "\"OFF\" color = \"green\" if config.enable else \"red\" button.config(bg=color, activebackground", "pass def toggle_config_enable(self, id, button : tk.Button): config = self.control.getConfigs()[id]", "width=40) detail_entry.insert(tk.END, val_config.detail) detail_entry.config(state = tk.DISABLED) detail_entry.config(disabledforeground = \"black\", disabledbackground", "column = 0) detail_entry = tk.Entry(self.config_frame, width=40) 
detail_entry.insert(tk.END, config.detail) detail_entry.config(state", "column = 2) bt_enable = tk.Button(self.value_config_frame, text=\"ON\", width= 5) bt_enable[\"text\"]", "= 0, column = 0) lb_description = tk.Label(self.config_frame, width =", "for i, config in enumerate(self.control.getConfigs()): symbol_entry = tk.Entry(self.config_frame, width=20) symbol_entry.insert(tk.END,", "= 3) self.value_config_frame.pack(side=tk.TOP, anchor=tk.W) # Generator Button self.bt_generate = tk.Button(self.root)", "i + 1, column = 1) bt_enable = tk.Button(self.config_frame, text=\"ON\",", "2) lb_enable = tk.Label(self.value_config_frame, width = 10) lb_enable[\"text\"] = \"Enable\"", "= tk.Button(self.root) self.bt_generate[\"text\"] = \"Generate Header\" self.bt_generate[\"command\"] = self.generateHeader self.bt_generate.pack(side=tk.BOTTOM,", "= \"green\" if val_config.enable else \"red\" button.config(bg=color, activebackground = color)", "i + 1, column = 0) detail_entry = tk.Entry(self.config_frame, width=40)", "= 2) lb_enable = tk.Label(self.value_config_frame, width = 10) lb_enable[\"text\"] =", "1, column = 2) self.config_frame.pack(side=tk.TOP, anchor=tk.NW) self.value_config_frame = tk.Frame(self.root) #", "lb_description[\"text\"] = \"Detail\" lb_description.grid(row = 0, column = 1) lb_enable", "= 0, column = 2) for i, config in enumerate(self.control.getConfigs()):", "value_entry.config(disabledforeground = \"black\", disabledbackground = \"white\") value_entry.grid(row= i + 1,", "tk.DISABLED) detail_entry.config(disabledforeground = \"black\", disabledbackground = \"white\") detail_entry.grid(row= i +", "# Init Window self.root = tk.Tk() self.root.title(u\"Header File Generator\") self.root.geometry(\"700x800\")", "= 0, column = 2) lb_enable = tk.Label(self.value_config_frame, width =", "tkinter as tk import tkinter.messagebox from Control import Control class", "= 0) lb_description = tk.Label(self.value_config_frame, width = 40) lb_description[\"text\"] =", 
"button : tk.Button): val_config = self.control.getValConfigs()[id] val_config.enable = not val_config.enable", "= lambda id = i, button = bt_enable : self.toggle_val_config_enable(id,", "tk.Frame(self.root) # Config Table lb_symbol = tk.Label(self.value_config_frame, width = 20)", "5) bt_enable[\"text\"] = \"ON\" if config.enable else \"OFF\" color =", "self.toggle_config_enable(id, button) bt_enable.grid(row = i + 1, column = 2)", "Control import Control class View: def __init__(self, control : Control.Control):", "1) value_entry = tk.Entry(self.value_config_frame, width=10) value_entry.insert(tk.END, val_config.value) value_entry.config(state = tk.DISABLED)", "Generator Button self.bt_generate = tk.Button(self.root) self.bt_generate[\"text\"] = \"Generate Header\" self.bt_generate[\"command\"]", "bt_enable.grid(row = i + 1, column = 2) self.config_frame.pack(side=tk.TOP, anchor=tk.NW)", "\"green\" if config.enable else \"red\" button.config(bg=color, activebackground = color) def", "column = 2) for i, config in enumerate(self.control.getConfigs()): symbol_entry =", "tkinter.messagebox from Control import Control class View: def __init__(self, control", "from Control import Control class View: def __init__(self, control :", "= self.control.getValConfigs()[id] val_config.enable = not val_config.enable button[\"text\"] = \"ON\" if", "= tk.Label(self.value_config_frame, width = 10) lb_value[\"text\"] = \"Value\" lb_value.grid(row =", "\"Enable\" lb_enable.grid(row = 0, column = 2) for i, config", "button.config(bg=color, activebackground = color) def toggle_val_config_enable(self, id, button : tk.Button):", "Window self.root = tk.Tk() self.root.title(u\"Header File Generator\") self.root.geometry(\"700x800\") self.config_frame =", "= \"black\", disabledbackground = \"white\") symbol_entry.grid(row= i + 1, column", "\"green\" if val_config.enable else \"red\" bt_enable.config(bg=color, activebackground = color) bt_enable[\"command\"]", "config.symbol) 
symbol_entry.config(state = tk.DISABLED) symbol_entry.config(disabledforeground = \"black\", disabledbackground = \"white\")", "self.value_config_frame.pack(side=tk.TOP, anchor=tk.W) # Generator Button self.bt_generate = tk.Button(self.root) self.bt_generate[\"text\"] =", "1, column = 2) bt_enable = tk.Button(self.value_config_frame, text=\"ON\", width= 5)", "config.enable button[\"text\"] = \"ON\" if config.enable else \"OFF\" color =", "20) lb_symbol[\"text\"] = \"Symbol\" lb_symbol.grid(row = 0, column = 0)", "val_config.detail) detail_entry.config(state = tk.DISABLED) detail_entry.config(disabledforeground = \"black\", disabledbackground = \"white\")", "tk.Label(self.value_config_frame, width = 20) lb_symbol[\"text\"] = \"Symbol\" lb_symbol.grid(row = 0,", "symbol_entry.grid(row= i + 1, column = 0) detail_entry = tk.Entry(self.value_config_frame,", "generateHeader(self): self.control.generateHeader() tk.messagebox.showinfo(\"Header Generator Info\", \"Generated:{0}\".format(self.control.header_config.path)) def update(self): pass def", "if config.enable else \"red\" bt_enable.config(bg=color, activebackground = color) bt_enable[\"command\"] =", "2) bt_enable = tk.Button(self.value_config_frame, text=\"ON\", width= 5) bt_enable[\"text\"] = \"ON\"", "\"OFF\" color = \"green\" if val_config.enable else \"red\" bt_enable.config(bg=color, activebackground", "tk.Button(self.value_config_frame, text=\"ON\", width= 5) bt_enable[\"text\"] = \"ON\" if val_config.enable else", "= lambda id = i, button = bt_enable : self.toggle_config_enable(id,", "else \"red\" bt_enable.config(bg=color, activebackground = color) bt_enable[\"command\"] = lambda id", ": Control.Control): self.control = control # Init Window self.root =", "column = 3) self.value_config_frame.pack(side=tk.TOP, anchor=tk.W) # Generator Button self.bt_generate =", "\"Detail\" lb_description.grid(row = 0, column = 1) lb_value = tk.Label(self.value_config_frame,", "= \"Symbol\" lb_symbol.grid(row = 0, column = 0) 
lb_description =", "disabledbackground = \"white\") value_entry.grid(row= i + 1, column = 2)", "\"black\", disabledbackground = \"white\") detail_entry.grid(row= i + 1, column =", "lb_value = tk.Label(self.value_config_frame, width = 10) lb_value[\"text\"] = \"Value\" lb_value.grid(row", "else \"OFF\" color = \"green\" if config.enable else \"red\" bt_enable.config(bg=color,", "toggle_config_enable(self, id, button : tk.Button): config = self.control.getConfigs()[id] config.enable =", "self.value_config_frame = tk.Frame(self.root) # Config Table lb_symbol = tk.Label(self.value_config_frame, width", "Generator\") self.root.geometry(\"700x800\") self.config_frame = tk.Frame(self.root) # Config Table lb_symbol =", "tk.Entry(self.value_config_frame, width=10) value_entry.insert(tk.END, val_config.value) value_entry.config(state = tk.DISABLED) value_entry.config(disabledforeground = \"black\",", "tk.Label(self.config_frame, width = 10) lb_enable[\"text\"] = \"Enable\" lb_enable.grid(row = 0,", "= bt_enable : self.toggle_config_enable(id, button) bt_enable.grid(row = i + 1,", "lambda id = i, button = bt_enable : self.toggle_val_config_enable(id, button)", "= \"green\" if config.enable else \"red\" button.config(bg=color, activebackground = color)", "symbol_entry.config(disabledforeground = \"black\", disabledbackground = \"white\") symbol_entry.grid(row= i + 1,", "id = i, button = bt_enable : self.toggle_val_config_enable(id, button) bt_enable.grid(row", "anchor=tk.NW) self.value_config_frame = tk.Frame(self.root) # Config Table lb_symbol = tk.Label(self.value_config_frame,", "Control class View: def __init__(self, control : Control.Control): self.control =", "def start(self): self.root.mainloop() def generateHeader(self): self.control.generateHeader() tk.messagebox.showinfo(\"Header Generator Info\", \"Generated:{0}\".format(self.control.header_config.path))", "tk.Label(self.value_config_frame, width = 10) lb_enable[\"text\"] = \"Enable\" lb_enable.grid(row = 0,", 
"Config Table lb_symbol = tk.Label(self.value_config_frame, width = 20) lb_symbol[\"text\"] =", "config in enumerate(self.control.getConfigs()): symbol_entry = tk.Entry(self.config_frame, width=20) symbol_entry.insert(tk.END, config.symbol) symbol_entry.config(state", "else \"OFF\" color = \"green\" if val_config.enable else \"red\" bt_enable.config(bg=color,", "= tk.Frame(self.root) # Config Table lb_symbol = tk.Label(self.value_config_frame, width =", "= tk.Label(self.value_config_frame, width = 20) lb_symbol[\"text\"] = \"Symbol\" lb_symbol.grid(row =", "width=10) value_entry.insert(tk.END, val_config.value) value_entry.config(state = tk.DISABLED) value_entry.config(disabledforeground = \"black\", disabledbackground", "control : Control.Control): self.control = control # Init Window self.root", "= 3) for i, val_config in enumerate(self.control.getValConfigs()): symbol_entry = tk.Entry(self.value_config_frame,", "= tk.Button(self.value_config_frame, text=\"ON\", width= 5) bt_enable[\"text\"] = \"ON\" if val_config.enable", "+ 1, column = 2) self.config_frame.pack(side=tk.TOP, anchor=tk.NW) self.value_config_frame = tk.Frame(self.root)", "self.toggle_val_config_enable(id, button) bt_enable.grid(row = i + 1, column = 3)", "self.config_frame.pack(side=tk.TOP, anchor=tk.NW) self.value_config_frame = tk.Frame(self.root) # Config Table lb_symbol =", "lb_description = tk.Label(self.value_config_frame, width = 40) lb_description[\"text\"] = \"Detail\" lb_description.grid(row", "= \"black\", disabledbackground = \"white\") detail_entry.grid(row= i + 1, column", "bt_enable[\"text\"] = \"ON\" if val_config.enable else \"OFF\" color = \"green\"", "Control.Control): self.control = control # Init Window self.root = tk.Tk()", "+ 1, column = 1) value_entry = tk.Entry(self.value_config_frame, width=10) value_entry.insert(tk.END,", "\"OFF\" color = \"green\" if val_config.enable else \"red\" button.config(bg=color, activebackground", "= \"ON\" if config.enable else \"OFF\" color = 
\"green\" if", "= 0) detail_entry = tk.Entry(self.value_config_frame, width=40) detail_entry.insert(tk.END, val_config.detail) detail_entry.config(state =", "Button self.bt_generate = tk.Button(self.root) self.bt_generate[\"text\"] = \"Generate Header\" self.bt_generate[\"command\"] =", "= \"green\" if val_config.enable else \"red\" bt_enable.config(bg=color, activebackground = color)", "= i, button = bt_enable : self.toggle_val_config_enable(id, button) bt_enable.grid(row =", "detail_entry.insert(tk.END, config.detail) detail_entry.config(state = tk.DISABLED) detail_entry.config(disabledforeground = \"black\", disabledbackground =", "Header\" self.bt_generate[\"command\"] = self.generateHeader self.bt_generate.pack(side=tk.BOTTOM, anchor=tk.SE) def start(self): self.root.mainloop() def", "= self.control.getConfigs()[id] config.enable = not config.enable button[\"text\"] = \"ON\" if", "= tk.Tk() self.root.title(u\"Header File Generator\") self.root.geometry(\"700x800\") self.config_frame = tk.Frame(self.root) #", "symbol_entry.config(state = tk.DISABLED) symbol_entry.config(disabledforeground = \"black\", disabledbackground = \"white\") symbol_entry.grid(row=", "config = self.control.getConfigs()[id] config.enable = not config.enable button[\"text\"] = \"ON\"", "column = 0) lb_description = tk.Label(self.value_config_frame, width = 40) lb_description[\"text\"]", "enumerate(self.control.getValConfigs()): symbol_entry = tk.Entry(self.value_config_frame, width=20) symbol_entry.insert(tk.END, val_config.symbol) symbol_entry.config(state = tk.DISABLED)", "= \"green\" if config.enable else \"red\" bt_enable.config(bg=color, activebackground = color)", "1, column = 1) value_entry = tk.Entry(self.value_config_frame, width=10) value_entry.insert(tk.END, val_config.value)", "= tk.Entry(self.value_config_frame, width=20) symbol_entry.insert(tk.END, val_config.symbol) symbol_entry.config(state = tk.DISABLED) symbol_entry.config(disabledforeground =", "= 2) bt_enable = 
tk.Button(self.value_config_frame, text=\"ON\", width= 5) bt_enable[\"text\"] =", "color = \"green\" if val_config.enable else \"red\" bt_enable.config(bg=color, activebackground =", "else \"OFF\" color = \"green\" if val_config.enable else \"red\" button.config(bg=color,", "disabledbackground = \"white\") detail_entry.grid(row= i + 1, column = 1)", "tk.Label(self.value_config_frame, width = 40) lb_description[\"text\"] = \"Detail\" lb_description.grid(row = 0,", "0) detail_entry = tk.Entry(self.config_frame, width=40) detail_entry.insert(tk.END, config.detail) detail_entry.config(state = tk.DISABLED)", "i + 1, column = 2) self.config_frame.pack(side=tk.TOP, anchor=tk.NW) self.value_config_frame =", "40) lb_description[\"text\"] = \"Detail\" lb_description.grid(row = 0, column = 1)", "tk.Label(self.value_config_frame, width = 10) lb_value[\"text\"] = \"Value\" lb_value.grid(row = 0,", "def toggle_config_enable(self, id, button : tk.Button): config = self.control.getConfigs()[id] config.enable", "if val_config.enable else \"OFF\" color = \"green\" if val_config.enable else", "= 10) lb_enable[\"text\"] = \"Enable\" lb_enable.grid(row = 0, column =", "lb_symbol.grid(row = 0, column = 0) lb_description = tk.Label(self.config_frame, width", "i + 1, column = 1) value_entry = tk.Entry(self.value_config_frame, width=10)", "anchor=tk.SE) def start(self): self.root.mainloop() def generateHeader(self): self.control.generateHeader() tk.messagebox.showinfo(\"Header Generator Info\",", "= tk.Label(self.config_frame, width = 40) lb_description[\"text\"] = \"Detail\" lb_description.grid(row =", "1) lb_enable = tk.Label(self.config_frame, width = 10) lb_enable[\"text\"] = \"Enable\"", "width = 10) lb_enable[\"text\"] = \"Enable\" lb_enable.grid(row = 0, column", "= tk.DISABLED) symbol_entry.config(disabledforeground = \"black\", disabledbackground = \"white\") symbol_entry.grid(row= i", "self.generateHeader self.bt_generate.pack(side=tk.BOTTOM, anchor=tk.SE) def start(self): 
self.root.mainloop() def generateHeader(self): self.control.generateHeader() tk.messagebox.showinfo(\"Header", "lb_description = tk.Label(self.config_frame, width = 40) lb_description[\"text\"] = \"Detail\" lb_description.grid(row", "tk.Frame(self.root) # Config Table lb_symbol = tk.Label(self.config_frame, width = 20)", "10) lb_value[\"text\"] = \"Value\" lb_value.grid(row = 0, column = 2)", "val_config in enumerate(self.control.getValConfigs()): symbol_entry = tk.Entry(self.value_config_frame, width=20) symbol_entry.insert(tk.END, val_config.symbol) symbol_entry.config(state", "+ 1, column = 3) self.value_config_frame.pack(side=tk.TOP, anchor=tk.W) # Generator Button", "self.root.mainloop() def generateHeader(self): self.control.generateHeader() tk.messagebox.showinfo(\"Header Generator Info\", \"Generated:{0}\".format(self.control.header_config.path)) def update(self):", "value_entry.config(state = tk.DISABLED) value_entry.config(disabledforeground = \"black\", disabledbackground = \"white\") value_entry.grid(row=", "= 2) self.config_frame.pack(side=tk.TOP, anchor=tk.NW) self.value_config_frame = tk.Frame(self.root) # Config Table", "update(self): pass def toggle_config_enable(self, id, button : tk.Button): config =", "1, column = 3) self.value_config_frame.pack(side=tk.TOP, anchor=tk.W) # Generator Button self.bt_generate", "config.detail) detail_entry.config(state = tk.DISABLED) detail_entry.config(disabledforeground = \"black\", disabledbackground = \"white\")", "= not config.enable button[\"text\"] = \"ON\" if config.enable else \"OFF\"", "tk.Entry(self.value_config_frame, width=20) symbol_entry.insert(tk.END, val_config.symbol) symbol_entry.config(state = tk.DISABLED) symbol_entry.config(disabledforeground = \"black\",", ": tk.Button): config = self.control.getConfigs()[id] config.enable = not config.enable button[\"text\"]", "width = 40) lb_description[\"text\"] = \"Detail\" lb_description.grid(row = 0, column", "0, column = 2) for i, config in 
enumerate(self.control.getConfigs()): symbol_entry", "i, val_config in enumerate(self.control.getValConfigs()): symbol_entry = tk.Entry(self.value_config_frame, width=20) symbol_entry.insert(tk.END, val_config.symbol)", "detail_entry.grid(row= i + 1, column = 1) bt_enable = tk.Button(self.config_frame,", "= \"white\") detail_entry.grid(row= i + 1, column = 1) value_entry", "def __init__(self, control : Control.Control): self.control = control # Init", "0, column = 3) for i, val_config in enumerate(self.control.getValConfigs()): symbol_entry", "i + 1, column = 2) bt_enable = tk.Button(self.value_config_frame, text=\"ON\",", "\"red\" button.config(bg=color, activebackground = color) def toggle_val_config_enable(self, id, button :", "val_config.enable else \"OFF\" color = \"green\" if val_config.enable else \"red\"", "column = 0) detail_entry = tk.Entry(self.value_config_frame, width=40) detail_entry.insert(tk.END, val_config.detail) detail_entry.config(state", "0, column = 1) lb_value = tk.Label(self.value_config_frame, width = 10)", "lb_value.grid(row = 0, column = 2) lb_enable = tk.Label(self.value_config_frame, width", "5) bt_enable[\"text\"] = \"ON\" if val_config.enable else \"OFF\" color =", "text=\"ON\", width= 5) bt_enable[\"text\"] = \"ON\" if val_config.enable else \"OFF\"", "control # Init Window self.root = tk.Tk() self.root.title(u\"Header File Generator\")", "detail_entry.config(state = tk.DISABLED) detail_entry.config(disabledforeground = \"black\", disabledbackground = \"white\") detail_entry.grid(row=", "0, column = 2) lb_enable = tk.Label(self.value_config_frame, width = 10)", "val_config = self.control.getValConfigs()[id] val_config.enable = not val_config.enable button[\"text\"] = \"ON\"", "lb_enable.grid(row = 0, column = 3) for i, val_config in", "= 0, column = 1) lb_enable = tk.Label(self.config_frame, width =", "text=\"ON\", width= 5) bt_enable[\"text\"] = \"ON\" if config.enable else \"OFF\"", "\"Value\" lb_value.grid(row = 0, column = 2) 
lb_enable = tk.Label(self.value_config_frame,", "i + 1, column = 3) self.value_config_frame.pack(side=tk.TOP, anchor=tk.W) # Generator", "config.enable = not config.enable button[\"text\"] = \"ON\" if config.enable else", "= \"white\") detail_entry.grid(row= i + 1, column = 1) bt_enable", "id, button : tk.Button): config = self.control.getConfigs()[id] config.enable = not", "= 1) lb_enable = tk.Label(self.config_frame, width = 10) lb_enable[\"text\"] =", "= tk.Label(self.value_config_frame, width = 10) lb_enable[\"text\"] = \"Enable\" lb_enable.grid(row =", "0, column = 1) lb_enable = tk.Label(self.config_frame, width = 10)", "\"white\") symbol_entry.grid(row= i + 1, column = 0) detail_entry =", "= \"white\") symbol_entry.grid(row= i + 1, column = 0) detail_entry", "= not val_config.enable button[\"text\"] = \"ON\" if val_config.enable else \"OFF\"", "val_config.enable = not val_config.enable button[\"text\"] = \"ON\" if val_config.enable else", "= tk.Entry(self.value_config_frame, width=40) detail_entry.insert(tk.END, val_config.detail) detail_entry.config(state = tk.DISABLED) detail_entry.config(disabledforeground =", "value_entry.insert(tk.END, val_config.value) value_entry.config(state = tk.DISABLED) value_entry.config(disabledforeground = \"black\", disabledbackground =", "1, column = 1) bt_enable = tk.Button(self.config_frame, text=\"ON\", width= 5)", "= tk.Entry(self.config_frame, width=40) detail_entry.insert(tk.END, config.detail) detail_entry.config(state = tk.DISABLED) detail_entry.config(disabledforeground =", "width=40) detail_entry.insert(tk.END, config.detail) detail_entry.config(state = tk.DISABLED) detail_entry.config(disabledforeground = \"black\", disabledbackground", "= \"Enable\" lb_enable.grid(row = 0, column = 3) for i,", "column = 3) for i, val_config in enumerate(self.control.getValConfigs()): symbol_entry =", "\"Symbol\" lb_symbol.grid(row = 0, column = 0) lb_description = tk.Label(self.value_config_frame,", "self.config_frame = 
tk.Frame(self.root) # Config Table lb_symbol = tk.Label(self.config_frame, width", "= i + 1, column = 3) self.value_config_frame.pack(side=tk.TOP, anchor=tk.W) #", "color) bt_enable[\"command\"] = lambda id = i, button = bt_enable", "= \"Generate Header\" self.bt_generate[\"command\"] = self.generateHeader self.bt_generate.pack(side=tk.BOTTOM, anchor=tk.SE) def start(self):", "if config.enable else \"red\" button.config(bg=color, activebackground = color) def toggle_val_config_enable(self,", "= color) def toggle_val_config_enable(self, id, button : tk.Button): val_config =", "__init__(self, control : Control.Control): self.control = control # Init Window", "config.enable else \"red\" bt_enable.config(bg=color, activebackground = color) bt_enable[\"command\"] = lambda", "+ 1, column = 1) bt_enable = tk.Button(self.config_frame, text=\"ON\", width=", "lb_enable.grid(row = 0, column = 2) for i, config in", "\"Enable\" lb_enable.grid(row = 0, column = 3) for i, val_config", "self.control.generateHeader() tk.messagebox.showinfo(\"Header Generator Info\", \"Generated:{0}\".format(self.control.header_config.path)) def update(self): pass def toggle_config_enable(self,", "i, button = bt_enable : self.toggle_val_config_enable(id, button) bt_enable.grid(row = i", "disabledbackground = \"white\") symbol_entry.grid(row= i + 1, column = 0)", "import tkinter as tk import tkinter.messagebox from Control import Control", "lb_enable[\"text\"] = \"Enable\" lb_enable.grid(row = 0, column = 3) for", "0, column = 0) lb_description = tk.Label(self.value_config_frame, width = 40)", "button) bt_enable.grid(row = i + 1, column = 3) self.value_config_frame.pack(side=tk.TOP,", "= i, button = bt_enable : self.toggle_config_enable(id, button) bt_enable.grid(row =", "bt_enable.config(bg=color, activebackground = color) bt_enable[\"command\"] = lambda id = i,", "= \"Enable\" lb_enable.grid(row = 0, column = 2) for i,", "column = 0) lb_description = tk.Label(self.config_frame, width = 40) 
lb_description[\"text\"]", "= tk.Entry(self.config_frame, width=20) symbol_entry.insert(tk.END, config.symbol) symbol_entry.config(state = tk.DISABLED) symbol_entry.config(disabledforeground =", "lb_value[\"text\"] = \"Value\" lb_value.grid(row = 0, column = 2) lb_enable", "bt_enable[\"command\"] = lambda id = i, button = bt_enable :", "\"ON\" if config.enable else \"OFF\" color = \"green\" if config.enable", "Init Window self.root = tk.Tk() self.root.title(u\"Header File Generator\") self.root.geometry(\"700x800\") self.config_frame", "View: def __init__(self, control : Control.Control): self.control = control #", "lb_symbol = tk.Label(self.value_config_frame, width = 20) lb_symbol[\"text\"] = \"Symbol\" lb_symbol.grid(row", "= tk.Entry(self.value_config_frame, width=10) value_entry.insert(tk.END, val_config.value) value_entry.config(state = tk.DISABLED) value_entry.config(disabledforeground =", "= color) bt_enable[\"command\"] = lambda id = i, button =", "button[\"text\"] = \"ON\" if config.enable else \"OFF\" color = \"green\"", "else \"OFF\" color = \"green\" if config.enable else \"red\" button.config(bg=color,", "bt_enable : self.toggle_config_enable(id, button) bt_enable.grid(row = i + 1, column", "3) self.value_config_frame.pack(side=tk.TOP, anchor=tk.W) # Generator Button self.bt_generate = tk.Button(self.root) self.bt_generate[\"text\"]", "detail_entry = tk.Entry(self.config_frame, width=40) detail_entry.insert(tk.END, config.detail) detail_entry.config(state = tk.DISABLED) detail_entry.config(disabledforeground", "width = 20) lb_symbol[\"text\"] = \"Symbol\" lb_symbol.grid(row = 0, column", "+ 1, column = 0) detail_entry = tk.Entry(self.value_config_frame, width=40) detail_entry.insert(tk.END,", "= \"black\", disabledbackground = \"white\") value_entry.grid(row= i + 1, column", "+ 1, column = 2) bt_enable = tk.Button(self.value_config_frame, text=\"ON\", width=", "# Generator Button self.bt_generate = tk.Button(self.root) self.bt_generate[\"text\"] = 
\"Generate Header\"", "column = 1) bt_enable = tk.Button(self.config_frame, text=\"ON\", width= 5) bt_enable[\"text\"]", "= bt_enable : self.toggle_val_config_enable(id, button) bt_enable.grid(row = i + 1,", "= 1) bt_enable = tk.Button(self.config_frame, text=\"ON\", width= 5) bt_enable[\"text\"] =", "1, column = 0) detail_entry = tk.Entry(self.config_frame, width=40) detail_entry.insert(tk.END, config.detail)", "\"OFF\" color = \"green\" if config.enable else \"red\" bt_enable.config(bg=color, activebackground", "tk import tkinter.messagebox from Control import Control class View: def", "val_config.enable button[\"text\"] = \"ON\" if val_config.enable else \"OFF\" color =", ": self.toggle_val_config_enable(id, button) bt_enable.grid(row = i + 1, column =", "tk.Label(self.config_frame, width = 40) lb_description[\"text\"] = \"Detail\" lb_description.grid(row = 0,", "detail_entry = tk.Entry(self.value_config_frame, width=40) detail_entry.insert(tk.END, val_config.detail) detail_entry.config(state = tk.DISABLED) detail_entry.config(disabledforeground", "self.root.geometry(\"700x800\") self.config_frame = tk.Frame(self.root) # Config Table lb_symbol = tk.Label(self.config_frame,", "in enumerate(self.control.getConfigs()): symbol_entry = tk.Entry(self.config_frame, width=20) symbol_entry.insert(tk.END, config.symbol) symbol_entry.config(state =", "tk.Button): config = self.control.getConfigs()[id] config.enable = not config.enable button[\"text\"] =", "1) lb_value = tk.Label(self.value_config_frame, width = 10) lb_value[\"text\"] = \"Value\"", "id, button : tk.Button): val_config = self.control.getValConfigs()[id] val_config.enable = not", "anchor=tk.W) # Generator Button self.bt_generate = tk.Button(self.root) self.bt_generate[\"text\"] = \"Generate", "0, column = 0) lb_description = tk.Label(self.config_frame, width = 40)", "tk.DISABLED) value_entry.config(disabledforeground = \"black\", disabledbackground = \"white\") value_entry.grid(row= i +", "Config Table 
lb_symbol = tk.Label(self.config_frame, width = 20) lb_symbol[\"text\"] =", "= 0, column = 3) for i, val_config in enumerate(self.control.getValConfigs()):", ": self.toggle_config_enable(id, button) bt_enable.grid(row = i + 1, column =", "lb_enable[\"text\"] = \"Enable\" lb_enable.grid(row = 0, column = 2) for", "= 40) lb_description[\"text\"] = \"Detail\" lb_description.grid(row = 0, column =", "value_entry.grid(row= i + 1, column = 2) bt_enable = tk.Button(self.value_config_frame,", "\"Symbol\" lb_symbol.grid(row = 0, column = 0) lb_description = tk.Label(self.config_frame,", "enumerate(self.control.getConfigs()): symbol_entry = tk.Entry(self.config_frame, width=20) symbol_entry.insert(tk.END, config.symbol) symbol_entry.config(state = tk.DISABLED)", "= 1) lb_value = tk.Label(self.value_config_frame, width = 10) lb_value[\"text\"] =", "val_config.enable else \"red\" bt_enable.config(bg=color, activebackground = color) bt_enable[\"command\"] = lambda", "color = \"green\" if val_config.enable else \"red\" button.config(bg=color, activebackground =", "0) lb_description = tk.Label(self.value_config_frame, width = 40) lb_description[\"text\"] = \"Detail\"", "0) detail_entry = tk.Entry(self.value_config_frame, width=40) detail_entry.insert(tk.END, val_config.detail) detail_entry.config(state = tk.DISABLED)", "self.bt_generate[\"command\"] = self.generateHeader self.bt_generate.pack(side=tk.BOTTOM, anchor=tk.SE) def start(self): self.root.mainloop() def generateHeader(self):", "width= 5) bt_enable[\"text\"] = \"ON\" if val_config.enable else \"OFF\" color", "tk.Entry(self.config_frame, width=20) symbol_entry.insert(tk.END, config.symbol) symbol_entry.config(state = tk.DISABLED) symbol_entry.config(disabledforeground = \"black\",", "Table lb_symbol = tk.Label(self.config_frame, width = 20) lb_symbol[\"text\"] = \"Symbol\"", "if config.enable else \"OFF\" color = \"green\" if config.enable else", "tk.Button): val_config = self.control.getValConfigs()[id] val_config.enable 
= not val_config.enable button[\"text\"] =", "= 2) for i, config in enumerate(self.control.getConfigs()): symbol_entry = tk.Entry(self.config_frame,", "symbol_entry.insert(tk.END, val_config.symbol) symbol_entry.config(state = tk.DISABLED) symbol_entry.config(disabledforeground = \"black\", disabledbackground =", "= \"Value\" lb_value.grid(row = 0, column = 2) lb_enable =", "tk.Label(self.config_frame, width = 20) lb_symbol[\"text\"] = \"Symbol\" lb_symbol.grid(row = 0,", "self.root.title(u\"Header File Generator\") self.root.geometry(\"700x800\") self.config_frame = tk.Frame(self.root) # Config Table", "width= 5) bt_enable[\"text\"] = \"ON\" if config.enable else \"OFF\" color", "tk.DISABLED) symbol_entry.config(disabledforeground = \"black\", disabledbackground = \"white\") symbol_entry.grid(row= i +", "# Config Table lb_symbol = tk.Label(self.value_config_frame, width = 20) lb_symbol[\"text\"]", "lb_symbol = tk.Label(self.config_frame, width = 20) lb_symbol[\"text\"] = \"Symbol\" lb_symbol.grid(row", "i + 1, column = 0) detail_entry = tk.Entry(self.value_config_frame, width=40)", "lb_description.grid(row = 0, column = 1) lb_value = tk.Label(self.value_config_frame, width", "width=20) symbol_entry.insert(tk.END, val_config.symbol) symbol_entry.config(state = tk.DISABLED) symbol_entry.config(disabledforeground = \"black\", disabledbackground", "= 20) lb_symbol[\"text\"] = \"Symbol\" lb_symbol.grid(row = 0, column =", "\"Generate Header\" self.bt_generate[\"command\"] = self.generateHeader self.bt_generate.pack(side=tk.BOTTOM, anchor=tk.SE) def start(self): self.root.mainloop()", "button) bt_enable.grid(row = i + 1, column = 2) self.config_frame.pack(side=tk.TOP,", "= tk.Frame(self.root) # Config Table lb_symbol = tk.Label(self.config_frame, width =", "= self.generateHeader self.bt_generate.pack(side=tk.BOTTOM, anchor=tk.SE) def start(self): self.root.mainloop() def generateHeader(self): self.control.generateHeader()", "tk.Tk() self.root.title(u\"Header File 
Generator\") self.root.geometry(\"700x800\") self.config_frame = tk.Frame(self.root) # Config", "bt_enable = tk.Button(self.config_frame, text=\"ON\", width= 5) bt_enable[\"text\"] = \"ON\" if", "\"black\", disabledbackground = \"white\") value_entry.grid(row= i + 1, column =", "width = 10) lb_value[\"text\"] = \"Value\" lb_value.grid(row = 0, column", "column = 1) lb_enable = tk.Label(self.config_frame, width = 10) lb_enable[\"text\"]", "symbol_entry.grid(row= i + 1, column = 0) detail_entry = tk.Entry(self.config_frame,", "width=20) symbol_entry.insert(tk.END, config.symbol) symbol_entry.config(state = tk.DISABLED) symbol_entry.config(disabledforeground = \"black\", disabledbackground", "symbol_entry = tk.Entry(self.config_frame, width=20) symbol_entry.insert(tk.END, config.symbol) symbol_entry.config(state = tk.DISABLED) symbol_entry.config(disabledforeground", "not val_config.enable button[\"text\"] = \"ON\" if val_config.enable else \"OFF\" color", "\"white\") detail_entry.grid(row= i + 1, column = 1) bt_enable =", "for i, val_config in enumerate(self.control.getValConfigs()): symbol_entry = tk.Entry(self.value_config_frame, width=20) symbol_entry.insert(tk.END,", "Generator Info\", \"Generated:{0}\".format(self.control.header_config.path)) def update(self): pass def toggle_config_enable(self, id, button", "Table lb_symbol = tk.Label(self.value_config_frame, width = 20) lb_symbol[\"text\"] = \"Symbol\"", "in enumerate(self.control.getValConfigs()): symbol_entry = tk.Entry(self.value_config_frame, width=20) symbol_entry.insert(tk.END, val_config.symbol) symbol_entry.config(state =", "import tkinter.messagebox from Control import Control class View: def __init__(self,", "\"white\") value_entry.grid(row= i + 1, column = 2) bt_enable =", "lb_description[\"text\"] = \"Detail\" lb_description.grid(row = 0, column = 1) lb_value", "activebackground = color) bt_enable[\"command\"] = lambda id = i, button", "File Generator\") self.root.geometry(\"700x800\") 
self.config_frame = tk.Frame(self.root) # Config Table lb_symbol", "= 0) detail_entry = tk.Entry(self.config_frame, width=40) detail_entry.insert(tk.END, config.detail) detail_entry.config(state =", "tk.Button(self.root) self.bt_generate[\"text\"] = \"Generate Header\" self.bt_generate[\"command\"] = self.generateHeader self.bt_generate.pack(side=tk.BOTTOM, anchor=tk.SE)", "bt_enable = tk.Button(self.value_config_frame, text=\"ON\", width= 5) bt_enable[\"text\"] = \"ON\" if", "= tk.Button(self.config_frame, text=\"ON\", width= 5) bt_enable[\"text\"] = \"ON\" if config.enable", "def generateHeader(self): self.control.generateHeader() tk.messagebox.showinfo(\"Header Generator Info\", \"Generated:{0}\".format(self.control.header_config.path)) def update(self): pass", "column = 2) lb_enable = tk.Label(self.value_config_frame, width = 10) lb_enable[\"text\"]", "config.enable else \"red\" button.config(bg=color, activebackground = color) def toggle_val_config_enable(self, id,", "2) for i, config in enumerate(self.control.getConfigs()): symbol_entry = tk.Entry(self.config_frame, width=20)", "lb_symbol[\"text\"] = \"Symbol\" lb_symbol.grid(row = 0, column = 0) lb_description" ]
[ "\"\"\" db_1 = db_factory(page_size=4096, sql_dialect=3, init=init_script_1) test_script_1 = \"\"\"select count(*)", "===================== 5 \"\"\" @pytest.mark.version('>=3.0') def test_1(act_1: Action): act_1.expected_stdout = expected_stdout_1", "TIMESTAMP if index is used # decription: # tracker_id: CORE-3355", "= \"\"\"create table tdate (id integer not null primary key,", "tdate (id integer not null primary key, val date); create", "tdate values (3, '1998-01-03'); insert into tdate values (4, '1998-01-04');", "tdate values (4, '1998-01-04'); insert into tdate values (5, '1998-01-05');", "(val); commit; insert into tdate values (0, '1997-12-31'); insert into", "tdate values (5, '1998-01-05'); commit; \"\"\" db_1 = db_factory(page_size=4096, sql_dialect=3,", "'1998-01-04'); insert into tdate values (5, '1998-01-05'); commit; \"\"\" db_1", "val < timestamp'1998-01-04 12:00:00.0000'; \"\"\" act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1)", "12:00:00.0000'; \"\"\" act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1 = \"\"\"", "key, val date); create index tdateix1 on tdate (val); commit;", "(id integer not null primary key, val date); create index", "version: 3.0 # resources: None substitutions_1 = [] init_script_1 =", "= [] init_script_1 = \"\"\"create table tdate (id integer not", "= \"\"\"select count(*) from tdate where val >= timestamp'1998-01-04 12:00:00.0000';", "values (2, '1998-01-02'); insert into tdate values (3, '1998-01-03'); insert", "\"\"\" COUNT ===================== 1 COUNT ===================== 5 \"\"\" @pytest.mark.version('>=3.0') def", "bugs.core_3355 # title: Wrong comparsion of DATE and TIMESTAMP if", "init_script_1 = \"\"\"create table tdate (id integer not null primary", "# resources: None substitutions_1 = [] init_script_1 = \"\"\"create table", "# version: 3.0 # resources: None substitutions_1 = [] init_script_1", "if index is used # decription: # tracker_id: CORE-3355 #", 
"comparsion of DATE and TIMESTAMP if index is used #", "COUNT ===================== 1 COUNT ===================== 5 \"\"\" @pytest.mark.version('>=3.0') def test_1(act_1:", "tdate where val < timestamp'1998-01-04 12:00:00.0000'; \"\"\" act_1 = isql_act('db_1',", "(0, '1997-12-31'); insert into tdate values (1, '1998-01-01'); insert into", "(3, '1998-01-03'); insert into tdate values (4, '1998-01-04'); insert into", "title: Wrong comparsion of DATE and TIMESTAMP if index is", "[] init_script_1 = \"\"\"create table tdate (id integer not null", "substitutions_1 = [] init_script_1 = \"\"\"create table tdate (id integer", "['2.1.5'] # versions: 3.0 # qmid: None import pytest from", "index is used # decription: # tracker_id: CORE-3355 # min_versions:", "values (4, '1998-01-04'); insert into tdate values (5, '1998-01-05'); commit;", "(1, '1998-01-01'); insert into tdate values (2, '1998-01-02'); insert into", "date); create index tdateix1 on tdate (val); commit; insert into", "of DATE and TIMESTAMP if index is used # decription:", "<filename>tests/bugs/core_3355_test.py #coding:utf-8 # # id: bugs.core_3355 # title: Wrong comparsion", "count(*) from tdate where val >= timestamp'1998-01-04 12:00:00.0000'; select count(*)", "test_1(act_1: Action): act_1.expected_stdout = expected_stdout_1 act_1.execute() assert act_1.clean_stdout == act_1.clean_expected_stdout", "on tdate (val); commit; insert into tdate values (0, '1997-12-31');", "# id: bugs.core_3355 # title: Wrong comparsion of DATE and", "\"\"\"create table tdate (id integer not null primary key, val", "timestamp'1998-01-04 12:00:00.0000'; \"\"\" act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1 =", "# versions: 3.0 # qmid: None import pytest from firebird.qa", "1 COUNT ===================== 5 \"\"\" @pytest.mark.version('>=3.0') def test_1(act_1: Action): act_1.expected_stdout", "not null primary key, val date); create index tdateix1 on", "COUNT ===================== 5 \"\"\" 
@pytest.mark.version('>=3.0') def test_1(act_1: Action): act_1.expected_stdout =", "values (5, '1998-01-05'); commit; \"\"\" db_1 = db_factory(page_size=4096, sql_dialect=3, init=init_script_1)", "tdate where val >= timestamp'1998-01-04 12:00:00.0000'; select count(*) from tdate", "# tracker_id: CORE-3355 # min_versions: ['2.1.5'] # versions: 3.0 #", "# qmid: None import pytest from firebird.qa import db_factory, isql_act,", "decription: # tracker_id: CORE-3355 # min_versions: ['2.1.5'] # versions: 3.0", "(5, '1998-01-05'); commit; \"\"\" db_1 = db_factory(page_size=4096, sql_dialect=3, init=init_script_1) test_script_1", "used # decription: # tracker_id: CORE-3355 # min_versions: ['2.1.5'] #", "test_script_1, substitutions=substitutions_1) expected_stdout_1 = \"\"\" COUNT ===================== 1 COUNT =====================", "val date); create index tdateix1 on tdate (val); commit; insert", "'1997-12-31'); insert into tdate values (1, '1998-01-01'); insert into tdate", "create index tdateix1 on tdate (val); commit; insert into tdate", "= db_factory(page_size=4096, sql_dialect=3, init=init_script_1) test_script_1 = \"\"\"select count(*) from tdate", "pytest from firebird.qa import db_factory, isql_act, Action # version: 3.0", "(2, '1998-01-02'); insert into tdate values (3, '1998-01-03'); insert into", "'1998-01-02'); insert into tdate values (3, '1998-01-03'); insert into tdate", "# # id: bugs.core_3355 # title: Wrong comparsion of DATE", "values (3, '1998-01-03'); insert into tdate values (4, '1998-01-04'); insert", "tracker_id: CORE-3355 # min_versions: ['2.1.5'] # versions: 3.0 # qmid:", "insert into tdate values (4, '1998-01-04'); insert into tdate values", "5 \"\"\" @pytest.mark.version('>=3.0') def test_1(act_1: Action): act_1.expected_stdout = expected_stdout_1 act_1.execute()", "insert into tdate values (2, '1998-01-02'); insert into tdate values", "substitutions=substitutions_1) expected_stdout_1 = \"\"\" COUNT ===================== 1 COUNT 
===================== 5", "\"\"\"select count(*) from tdate where val >= timestamp'1998-01-04 12:00:00.0000'; select", "select count(*) from tdate where val < timestamp'1998-01-04 12:00:00.0000'; \"\"\"", "firebird.qa import db_factory, isql_act, Action # version: 3.0 # resources:", "CORE-3355 # min_versions: ['2.1.5'] # versions: 3.0 # qmid: None", ">= timestamp'1998-01-04 12:00:00.0000'; select count(*) from tdate where val <", "insert into tdate values (3, '1998-01-03'); insert into tdate values", "import db_factory, isql_act, Action # version: 3.0 # resources: None", "act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1 = \"\"\" COUNT =====================", "db_1 = db_factory(page_size=4096, sql_dialect=3, init=init_script_1) test_script_1 = \"\"\"select count(*) from", "from tdate where val < timestamp'1998-01-04 12:00:00.0000'; \"\"\" act_1 =", "resources: None substitutions_1 = [] init_script_1 = \"\"\"create table tdate", "< timestamp'1998-01-04 12:00:00.0000'; \"\"\" act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1", "isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1 = \"\"\" COUNT ===================== 1 COUNT", "from firebird.qa import db_factory, isql_act, Action # version: 3.0 #", "id: bugs.core_3355 # title: Wrong comparsion of DATE and TIMESTAMP", "expected_stdout_1 = \"\"\" COUNT ===================== 1 COUNT ===================== 5 \"\"\"", "into tdate values (2, '1998-01-02'); insert into tdate values (3,", "is used # decription: # tracker_id: CORE-3355 # min_versions: ['2.1.5']", "tdate (val); commit; insert into tdate values (0, '1997-12-31'); insert", "= \"\"\" COUNT ===================== 1 COUNT ===================== 5 \"\"\" @pytest.mark.version('>=3.0')", "where val >= timestamp'1998-01-04 12:00:00.0000'; select count(*) from tdate where", "into tdate values (3, '1998-01-03'); insert into tdate values (4,", "val >= timestamp'1998-01-04 
12:00:00.0000'; select count(*) from tdate where val", "db_factory, isql_act, Action # version: 3.0 # resources: None substitutions_1", "DATE and TIMESTAMP if index is used # decription: #", "@pytest.mark.version('>=3.0') def test_1(act_1: Action): act_1.expected_stdout = expected_stdout_1 act_1.execute() assert act_1.clean_stdout", "Wrong comparsion of DATE and TIMESTAMP if index is used", "min_versions: ['2.1.5'] # versions: 3.0 # qmid: None import pytest", "def test_1(act_1: Action): act_1.expected_stdout = expected_stdout_1 act_1.execute() assert act_1.clean_stdout ==", "isql_act, Action # version: 3.0 # resources: None substitutions_1 =", "insert into tdate values (5, '1998-01-05'); commit; \"\"\" db_1 =", "values (0, '1997-12-31'); insert into tdate values (1, '1998-01-01'); insert", "# min_versions: ['2.1.5'] # versions: 3.0 # qmid: None import", "insert into tdate values (1, '1998-01-01'); insert into tdate values", "tdate values (2, '1998-01-02'); insert into tdate values (3, '1998-01-03');", "into tdate values (0, '1997-12-31'); insert into tdate values (1,", "into tdate values (4, '1998-01-04'); insert into tdate values (5,", "db_factory(page_size=4096, sql_dialect=3, init=init_script_1) test_script_1 = \"\"\"select count(*) from tdate where", "commit; \"\"\" db_1 = db_factory(page_size=4096, sql_dialect=3, init=init_script_1) test_script_1 = \"\"\"select", "3.0 # qmid: None import pytest from firebird.qa import db_factory,", "\"\"\" act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1 = \"\"\" COUNT", "tdate values (0, '1997-12-31'); insert into tdate values (1, '1998-01-01');", "# title: Wrong comparsion of DATE and TIMESTAMP if index", "insert into tdate values (0, '1997-12-31'); insert into tdate values", "===================== 1 COUNT ===================== 5 \"\"\" @pytest.mark.version('>=3.0') def test_1(act_1: Action):", "(4, '1998-01-04'); insert into tdate values (5, '1998-01-05'); commit; \"\"\"", "# 
decription: # tracker_id: CORE-3355 # min_versions: ['2.1.5'] # versions:", "test_script_1 = \"\"\"select count(*) from tdate where val >= timestamp'1998-01-04", "integer not null primary key, val date); create index tdateix1", "into tdate values (5, '1998-01-05'); commit; \"\"\" db_1 = db_factory(page_size=4096,", "= isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1 = \"\"\" COUNT ===================== 1", "index tdateix1 on tdate (val); commit; insert into tdate values", "null primary key, val date); create index tdateix1 on tdate", "sql_dialect=3, init=init_script_1) test_script_1 = \"\"\"select count(*) from tdate where val", "Action # version: 3.0 # resources: None substitutions_1 = []", "qmid: None import pytest from firebird.qa import db_factory, isql_act, Action", "table tdate (id integer not null primary key, val date);", "commit; insert into tdate values (0, '1997-12-31'); insert into tdate", "timestamp'1998-01-04 12:00:00.0000'; select count(*) from tdate where val < timestamp'1998-01-04", "'1998-01-03'); insert into tdate values (4, '1998-01-04'); insert into tdate", "'1998-01-01'); insert into tdate values (2, '1998-01-02'); insert into tdate", "from tdate where val >= timestamp'1998-01-04 12:00:00.0000'; select count(*) from", "None import pytest from firebird.qa import db_factory, isql_act, Action #", "import pytest from firebird.qa import db_factory, isql_act, Action # version:", "tdateix1 on tdate (val); commit; insert into tdate values (0,", "primary key, val date); create index tdateix1 on tdate (val);", "12:00:00.0000'; select count(*) from tdate where val < timestamp'1998-01-04 12:00:00.0000';", "where val < timestamp'1998-01-04 12:00:00.0000'; \"\"\" act_1 = isql_act('db_1', test_script_1,", "tdate values (1, '1998-01-01'); insert into tdate values (2, '1998-01-02');", "None substitutions_1 = [] init_script_1 = \"\"\"create table tdate (id", "#coding:utf-8 # # id: bugs.core_3355 # title: Wrong comparsion of", 
"3.0 # resources: None substitutions_1 = [] init_script_1 = \"\"\"create", "values (1, '1998-01-01'); insert into tdate values (2, '1998-01-02'); insert", "versions: 3.0 # qmid: None import pytest from firebird.qa import", "\"\"\" @pytest.mark.version('>=3.0') def test_1(act_1: Action): act_1.expected_stdout = expected_stdout_1 act_1.execute() assert", "into tdate values (1, '1998-01-01'); insert into tdate values (2,", "'1998-01-05'); commit; \"\"\" db_1 = db_factory(page_size=4096, sql_dialect=3, init=init_script_1) test_script_1 =", "init=init_script_1) test_script_1 = \"\"\"select count(*) from tdate where val >=", "count(*) from tdate where val < timestamp'1998-01-04 12:00:00.0000'; \"\"\" act_1", "and TIMESTAMP if index is used # decription: # tracker_id:" ]
[ "return \"pipeline_check_skipped\" def pipeline_check_passed(**kwargs): print(\"pipeline_check_passed:\") def end_pipeline(**kwargs): print(\"end_pipeline:\") def pipeline_check_skipped(**kwargs):", "files (Source: S3, Staging: EC2: Target: RDS Oracle)\", default_args=default_args, schedule_interval=None,", "BranchPythonOperator from datetime import date, timedelta, datetime from collections import", "airflow.operators.dummy_operator import DummyOperator default_args = { \"owner\": \"anilkdegala\", \"depends_on_past\": True,", "python_callable=begin_pipeline, provide_context=True, dag=dag, ) t_check_pipeline = BranchPythonOperator( task_id=\"check_pipeline\", python_callable=pipeline_enable_check, provide_context=True,", "trigger_rule=\"none_failed\", dag=dag, ) t_notify = PythonOperator( task_id=\"send_notifications\", python_callable=notify, provide_context=True, trigger_rule=\"none_failed\",", "timedelta, datetime from collections import OrderedDict from scripts.dag_pebbles import DagPebbles", "DagPebbles() print(\"notify\") def end(**kwargs): dp = DagPebbles() print(\"end\") with DAG(", "\"is_paused_upon_creation\": False, } def begin_pipeline(**kwargs): print(\"begin_pipeline:\") files = kwargs['dag_run'].conf.get('files') download_decrypt_arguments", "= PythonOperator( task_id=\"begin_pipeline\", python_callable=begin_pipeline, provide_context=True, dag=dag, ) t_check_pipeline = BranchPythonOperator(", "DagPebbles from airflow.configuration import conf from scripts.configurations import * from", "value=download_decrypt_arguments) kwargs[\"ti\"].xcom_push(key=\"transfer_arguments\", value=transfer_arguments) def pipeline_enable_check(**kwargs): dp = DagPebbles() if dp.pipeline_enable_check('DATA_LOAD'):", "download_decrypt_arguments = '' transfer_arguments_list = [] for f in files:", "dag=dag, ) t_check_pipeline = BranchPythonOperator( task_id=\"check_pipeline\", python_callable=pipeline_enable_check, provide_context=True, dag=dag, )", "= BashOperator( 
task_id='download_files', bash_command=download_files_cmd, dag=dag) decrypt_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/decrypt_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}}", "trigger_rule=\"none_failed\", dag=dag, ) t_end = PythonOperator( task_id=\"end\", python_callable=end, provide_context=True, trigger_rule=\"none_failed\",", ">> t_pipeline_check_skipped >> t_end_pipeline t_check_pipeline >> t_pipeline_check_passed >> t_download_files >>", "Decrypt, Transfer files (Source: S3, Staging: EC2: Target: RDS Oracle)\",", "\"pipeline_check_passed\" else: return \"pipeline_check_skipped\" def pipeline_check_passed(**kwargs): print(\"pipeline_check_passed:\") def end_pipeline(**kwargs): print(\"end_pipeline:\")", "datetime from collections import OrderedDict from scripts.dag_pebbles import DagPebbles from", "+ \"/\"+f['final_name']) transfer_arguments = \",\".join(transfer_arguments_list) print(\"final download_decrypt_arguments: \",download_decrypt_arguments) print(\"final transfer_arguments:", "dag=dag, ) t_cleanup = PythonOperator( task_id=\"cleanup\", python_callable=cleanup, provide_context=True, trigger_rule=\"none_failed\", dag=dag,", "provide_context=True, dag=dag, ) t_check_pipeline = BranchPythonOperator( task_id=\"check_pipeline\", python_callable=pipeline_enable_check, provide_context=True, dag=dag,", "t_check_pipeline = BranchPythonOperator( task_id=\"check_pipeline\", python_callable=pipeline_enable_check, provide_context=True, dag=dag, ) t_pipeline_check_passed =", "\"anilkdegala\", \"depends_on_past\": True, \"max_active_runs\": 1, \"start_date\": datetime(2015, 6, 1), \"is_active\":", "\"/opt/bitnami/airflow/airflow-data/scripts/download_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}}\" t_download_files = BashOperator( task_id='download_files', bash_command=download_files_cmd, dag=dag) decrypt_files_cmd", "from airflow.operators.bash_operator import BashOperator from airflow.operators.python_operator 
import PythonOperator, BranchPythonOperator from", "t_end_pipeline t_check_pipeline >> t_pipeline_check_passed >> t_download_files >> t_decrypt_files >> t_transfer_files", "download_decrypt_arguments = download_decrypt_arguments + \" \" + output transfer_arguments_list.append(DATA_LOCATION +", "\" t_decrypt_files = BashOperator( task_id='decrypt_files', bash_command=decrypt_files_cmd, dag=dag) transfer_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/transfer_files_rds.pl", "OrderedDict from scripts.dag_pebbles import DagPebbles from airflow.configuration import conf from", "def end(**kwargs): dp = DagPebbles() print(\"end\") with DAG( \"DOWNLOAD_DECRYPT_TRANSFER\", description=\"Download,", "PythonOperator( task_id=\"cleanup\", python_callable=cleanup, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_end = PythonOperator(", "= \"/opt/bitnami/airflow/airflow-data/scripts/decrypt_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}} \" t_decrypt_files = BashOperator( task_id='decrypt_files', bash_command=decrypt_files_cmd,", "= PythonOperator( task_id=\"end\", python_callable=end, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_pipeline_begin >>", "f['path']) output = f['name']+','+f['path']+','+f['final_name'] download_decrypt_arguments = download_decrypt_arguments + \" \"", "begin_pipeline(**kwargs): print(\"begin_pipeline:\") files = kwargs['dag_run'].conf.get('files') download_decrypt_arguments = '' transfer_arguments_list =", "ti.xcom_pull(key='download_decrypt_arguments')}}\" t_download_files = BashOperator( task_id='download_files', bash_command=download_files_cmd, dag=dag) decrypt_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/decrypt_files.sh", "provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_pipeline_begin >> t_check_pipeline t_check_pipeline >> t_pipeline_check_skipped", "end_pipeline(**kwargs): print(\"end_pipeline:\") def pipeline_check_skipped(**kwargs): 
print(\"pipeline_check_skipped:\") def cleanup(**kwargs): dp = DagPebbles()", "def pipeline_check_passed(**kwargs): print(\"pipeline_check_passed:\") def end_pipeline(**kwargs): print(\"end_pipeline:\") def pipeline_check_skipped(**kwargs): print(\"pipeline_check_skipped:\") def", "pipeline_check_skipped(**kwargs): print(\"pipeline_check_skipped:\") def cleanup(**kwargs): dp = DagPebbles() print(\"cleanup\") def notify(**kwargs):", "(Source: S3, Staging: EC2: Target: RDS Oracle)\", default_args=default_args, schedule_interval=None, catchup=False,", "python_callable=pipeline_enable_check, provide_context=True, dag=dag, ) t_pipeline_check_passed = PythonOperator( task_id=\"pipeline_check_passed\", python_callable=pipeline_check_passed, provide_context=True,", "\"owner\": \"anilkdegala\", \"depends_on_past\": True, \"max_active_runs\": 1, \"start_date\": datetime(2015, 6, 1),", "\",\".join(transfer_arguments_list) print(\"final download_decrypt_arguments: \",download_decrypt_arguments) print(\"final transfer_arguments: \",transfer_arguments) kwargs[\"ti\"].xcom_push(key=\"download_decrypt_arguments\", value=download_decrypt_arguments) kwargs[\"ti\"].xcom_push(key=\"transfer_arguments\",", "RDS Oracle)\", default_args=default_args, schedule_interval=None, catchup=False, orientation=\"TB\", tags=['Utils'], dagrun_timeout=timedelta(hours=240) ) as", "task_id=\"end\", python_callable=end, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_pipeline_begin >> t_check_pipeline t_check_pipeline", "os from airflow import DAG from airflow.operators.bash_operator import BashOperator from", "transfer_arguments_list = [] for f in files: print(\"download_decrypt_transfer_files: file: \",", "def begin_pipeline(**kwargs): print(\"begin_pipeline:\") files = kwargs['dag_run'].conf.get('files') download_decrypt_arguments = '' transfer_arguments_list", "bash_command=transfer_files_cmd, dag=dag) t_end_pipeline = PythonOperator( task_id=\"end_pipeline\", 
python_callable=end_pipeline, provide_context=True, trigger_rule=\"none_failed\", dag=dag,", "python_callable=notify, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_cleanup = PythonOperator( task_id=\"cleanup\", python_callable=cleanup,", "import DagPebbles from airflow.configuration import conf from scripts.configurations import *", "anilkdegala \"\"\" import os from airflow import DAG from airflow.operators.bash_operator", "dp.pipeline_enable_check('DATA_LOAD'): return \"pipeline_check_passed\" else: return \"pipeline_check_skipped\" def pipeline_check_passed(**kwargs): print(\"pipeline_check_passed:\") def", "print(\"pipeline_check_skipped:\") def cleanup(**kwargs): dp = DagPebbles() print(\"cleanup\") def notify(**kwargs): dp", "transfer_arguments = \",\".join(transfer_arguments_list) print(\"final download_decrypt_arguments: \",download_decrypt_arguments) print(\"final transfer_arguments: \",transfer_arguments) kwargs[\"ti\"].xcom_push(key=\"download_decrypt_arguments\",", "\"max_active_runs\": 1, \"start_date\": datetime(2015, 6, 1), \"is_active\": True, \"is_paused_upon_creation\": False,", "with DAG( \"DOWNLOAD_DECRYPT_TRANSFER\", description=\"Download, Decrypt, Transfer files (Source: S3, Staging:", "from airflow.configuration import conf from scripts.configurations import * from airflow.operators.dummy_operator", "t_pipeline_check_passed >> t_download_files >> t_decrypt_files >> t_transfer_files >> t_end_pipeline t_end_pipeline", "\"start_date\": datetime(2015, 6, 1), \"is_active\": True, \"is_paused_upon_creation\": False, } def", "6, 1), \"is_active\": True, \"is_paused_upon_creation\": False, } def begin_pipeline(**kwargs): print(\"begin_pipeline:\")", "task_id=\"pipeline_check_passed\", python_callable=pipeline_check_passed, provide_context=True, dag=dag, ) t_pipeline_check_skipped = PythonOperator( task_id=\"pipeline_check_skipped\", python_callable=pipeline_check_skipped,", "task_id=\"pipeline_check_skipped\", 
python_callable=pipeline_check_skipped, provide_context=True, dag=dag, ) download_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/download_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}}\"", "airflow.configuration import conf from scripts.configurations import * from airflow.operators.dummy_operator import", "t_download_files >> t_decrypt_files >> t_transfer_files >> t_end_pipeline t_end_pipeline >> t_cleanup", "task_id=\"cleanup\", python_callable=cleanup, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_end = PythonOperator( task_id=\"end\",", "= { \"owner\": \"anilkdegala\", \"depends_on_past\": True, \"max_active_runs\": 1, \"start_date\": datetime(2015,", "provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_end = PythonOperator( task_id=\"end\", python_callable=end, provide_context=True,", ") t_cleanup = PythonOperator( task_id=\"cleanup\", python_callable=cleanup, provide_context=True, trigger_rule=\"none_failed\", dag=dag, )", "import DAG from airflow.operators.bash_operator import BashOperator from airflow.operators.python_operator import PythonOperator,", "', location: ', f['path']) output = f['name']+','+f['path']+','+f['final_name'] download_decrypt_arguments = download_decrypt_arguments", ") t_pipeline_check_skipped = PythonOperator( task_id=\"pipeline_check_skipped\", python_callable=pipeline_check_skipped, provide_context=True, dag=dag, ) download_files_cmd", "t_cleanup = PythonOperator( task_id=\"cleanup\", python_callable=cleanup, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_end", "= PythonOperator( task_id=\"cleanup\", python_callable=cleanup, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_end =", "provide_context=True, dag=dag, ) download_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/download_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}}\" t_download_files =", "= [] for f in files: print(\"download_decrypt_transfer_files: 
file: \", f['name'],", "PythonOperator( task_id=\"send_notifications\", python_callable=notify, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_cleanup = PythonOperator(", "value=transfer_arguments) def pipeline_enable_check(**kwargs): dp = DagPebbles() if dp.pipeline_enable_check('DATA_LOAD'): return \"pipeline_check_passed\"", "import os from airflow import DAG from airflow.operators.bash_operator import BashOperator", "= DagPebbles() print(\"notify\") def end(**kwargs): dp = DagPebbles() print(\"end\") with", "from airflow import DAG from airflow.operators.bash_operator import BashOperator from airflow.operators.python_operator", ">> t_decrypt_files >> t_transfer_files >> t_end_pipeline t_end_pipeline >> t_cleanup >>", "from scripts.dag_pebbles import DagPebbles from airflow.configuration import conf from scripts.configurations", "\"is_active\": True, \"is_paused_upon_creation\": False, } def begin_pipeline(**kwargs): print(\"begin_pipeline:\") files =", "return \"pipeline_check_passed\" else: return \"pipeline_check_skipped\" def pipeline_check_passed(**kwargs): print(\"pipeline_check_passed:\") def end_pipeline(**kwargs):", "PythonOperator( task_id=\"begin_pipeline\", python_callable=begin_pipeline, provide_context=True, dag=dag, ) t_check_pipeline = BranchPythonOperator( task_id=\"check_pipeline\",", "bash_command=decrypt_files_cmd, dag=dag) transfer_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/transfer_files_rds.pl \"+\"{{ ti.xcom_pull(key='transfer_arguments')}} \" t_transfer_files =", "task_id=\"end_pipeline\", python_callable=end_pipeline, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_notify = PythonOperator( task_id=\"send_notifications\",", "DAG from airflow.operators.bash_operator import BashOperator from airflow.operators.python_operator import PythonOperator, BranchPythonOperator", ">> t_check_pipeline t_check_pipeline >> t_pipeline_check_skipped >> t_end_pipeline t_check_pipeline >> 
t_pipeline_check_passed", "False, } def begin_pipeline(**kwargs): print(\"begin_pipeline:\") files = kwargs['dag_run'].conf.get('files') download_decrypt_arguments =", "task_id=\"begin_pipeline\", python_callable=begin_pipeline, provide_context=True, dag=dag, ) t_check_pipeline = BranchPythonOperator( task_id=\"check_pipeline\", python_callable=pipeline_enable_check,", "pipeline_enable_check(**kwargs): dp = DagPebbles() if dp.pipeline_enable_check('DATA_LOAD'): return \"pipeline_check_passed\" else: return", "def pipeline_enable_check(**kwargs): dp = DagPebbles() if dp.pipeline_enable_check('DATA_LOAD'): return \"pipeline_check_passed\" else:", "', f['path']) output = f['name']+','+f['path']+','+f['final_name'] download_decrypt_arguments = download_decrypt_arguments + \"", "dag=dag, ) download_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/download_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}}\" t_download_files = BashOperator(", ">> t_download_files >> t_decrypt_files >> t_transfer_files >> t_end_pipeline t_end_pipeline >>", "PythonOperator( task_id=\"end_pipeline\", python_callable=end_pipeline, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_notify = PythonOperator(", "transfer_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/transfer_files_rds.pl \"+\"{{ ti.xcom_pull(key='transfer_arguments')}} \" t_transfer_files = BashOperator( task_id='transfer_files',", "= DagPebbles() print(\"cleanup\") def notify(**kwargs): dp = DagPebbles() print(\"notify\") def", "def end_pipeline(**kwargs): print(\"end_pipeline:\") def pipeline_check_skipped(**kwargs): print(\"pipeline_check_skipped:\") def cleanup(**kwargs): dp =", "notify(**kwargs): dp = DagPebbles() print(\"notify\") def end(**kwargs): dp = DagPebbles()", "print(\"notify\") def end(**kwargs): dp = DagPebbles() print(\"end\") with DAG( \"DOWNLOAD_DECRYPT_TRANSFER\",", "from airflow.operators.python_operator import PythonOperator, BranchPythonOperator from datetime 
import date, timedelta,", "python_callable=end, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_pipeline_begin >> t_check_pipeline t_check_pipeline >>", "kwargs[\"ti\"].xcom_push(key=\"transfer_arguments\", value=transfer_arguments) def pipeline_enable_check(**kwargs): dp = DagPebbles() if dp.pipeline_enable_check('DATA_LOAD'): return", "in files: print(\"download_decrypt_transfer_files: file: \", f['name'], ', location: ', f['path'])", "\", f['name'], ', location: ', f['path']) output = f['name']+','+f['path']+','+f['final_name'] download_decrypt_arguments", "\" \" + output transfer_arguments_list.append(DATA_LOCATION + \"/\"+f['final_name']) transfer_arguments = \",\".join(transfer_arguments_list)", "BashOperator from airflow.operators.python_operator import PythonOperator, BranchPythonOperator from datetime import date,", "t_check_pipeline t_check_pipeline >> t_pipeline_check_skipped >> t_end_pipeline t_check_pipeline >> t_pipeline_check_passed >>", "collections import OrderedDict from scripts.dag_pebbles import DagPebbles from airflow.configuration import", "1, \"start_date\": datetime(2015, 6, 1), \"is_active\": True, \"is_paused_upon_creation\": False, }", "\",download_decrypt_arguments) print(\"final transfer_arguments: \",transfer_arguments) kwargs[\"ti\"].xcom_push(key=\"download_decrypt_arguments\", value=download_decrypt_arguments) kwargs[\"ti\"].xcom_push(key=\"transfer_arguments\", value=transfer_arguments) def pipeline_enable_check(**kwargs):", "= DagPebbles() print(\"end\") with DAG( \"DOWNLOAD_DECRYPT_TRANSFER\", description=\"Download, Decrypt, Transfer files", "PythonOperator( task_id=\"pipeline_check_skipped\", python_callable=pipeline_check_skipped, provide_context=True, dag=dag, ) download_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/download_files.sh \"+\"{{", "python_callable=end_pipeline, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_notify = PythonOperator( task_id=\"send_notifications\", 
python_callable=notify,", "file: \", f['name'], ', location: ', f['path']) output = f['name']+','+f['path']+','+f['final_name']", "conf from scripts.configurations import * from airflow.operators.dummy_operator import DummyOperator default_args", "'' transfer_arguments_list = [] for f in files: print(\"download_decrypt_transfer_files: file:", "print(\"final transfer_arguments: \",transfer_arguments) kwargs[\"ti\"].xcom_push(key=\"download_decrypt_arguments\", value=download_decrypt_arguments) kwargs[\"ti\"].xcom_push(key=\"transfer_arguments\", value=transfer_arguments) def pipeline_enable_check(**kwargs): dp", "datetime(2015, 6, 1), \"is_active\": True, \"is_paused_upon_creation\": False, } def begin_pipeline(**kwargs):", "dp = DagPebbles() print(\"cleanup\") def notify(**kwargs): dp = DagPebbles() print(\"notify\")", "dp = DagPebbles() print(\"notify\") def end(**kwargs): dp = DagPebbles() print(\"end\")", ") t_end = PythonOperator( task_id=\"end\", python_callable=end, provide_context=True, trigger_rule=\"none_failed\", dag=dag, )", "\" + output transfer_arguments_list.append(DATA_LOCATION + \"/\"+f['final_name']) transfer_arguments = \",\".join(transfer_arguments_list) print(\"final", "datetime import date, timedelta, datetime from collections import OrderedDict from", "\"depends_on_past\": True, \"max_active_runs\": 1, \"start_date\": datetime(2015, 6, 1), \"is_active\": True,", "+ output transfer_arguments_list.append(DATA_LOCATION + \"/\"+f['final_name']) transfer_arguments = \",\".join(transfer_arguments_list) print(\"final download_decrypt_arguments:", "download_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/download_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}}\" t_download_files = BashOperator( task_id='download_files', bash_command=download_files_cmd,", "end(**kwargs): dp = DagPebbles() print(\"end\") with DAG( \"DOWNLOAD_DECRYPT_TRANSFER\", description=\"Download, Decrypt,", "BashOperator( task_id='transfer_files', 
bash_command=transfer_files_cmd, dag=dag) t_end_pipeline = PythonOperator( task_id=\"end_pipeline\", python_callable=end_pipeline, provide_context=True,", "t_check_pipeline >> t_pipeline_check_skipped >> t_end_pipeline t_check_pipeline >> t_pipeline_check_passed >> t_download_files", "as dag: t_pipeline_begin = PythonOperator( task_id=\"begin_pipeline\", python_callable=begin_pipeline, provide_context=True, dag=dag, )", "\"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}}\" t_download_files = BashOperator( task_id='download_files', bash_command=download_files_cmd, dag=dag) decrypt_files_cmd =", "{ \"owner\": \"anilkdegala\", \"depends_on_past\": True, \"max_active_runs\": 1, \"start_date\": datetime(2015, 6,", ") t_check_pipeline = BranchPythonOperator( task_id=\"check_pipeline\", python_callable=pipeline_enable_check, provide_context=True, dag=dag, ) t_pipeline_check_passed", ">> t_pipeline_check_passed >> t_download_files >> t_decrypt_files >> t_transfer_files >> t_end_pipeline", "def notify(**kwargs): dp = DagPebbles() print(\"notify\") def end(**kwargs): dp =", ">> t_transfer_files >> t_end_pipeline t_end_pipeline >> t_cleanup >> t_notify >>", "= PythonOperator( task_id=\"send_notifications\", python_callable=notify, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_cleanup =", "= \",\".join(transfer_arguments_list) print(\"final download_decrypt_arguments: \",download_decrypt_arguments) print(\"final transfer_arguments: \",transfer_arguments) kwargs[\"ti\"].xcom_push(key=\"download_decrypt_arguments\", value=download_decrypt_arguments)", "PythonOperator( task_id=\"pipeline_check_passed\", python_callable=pipeline_check_passed, provide_context=True, dag=dag, ) t_pipeline_check_skipped = PythonOperator( task_id=\"pipeline_check_skipped\",", "S3, Staging: EC2: Target: RDS Oracle)\", default_args=default_args, schedule_interval=None, catchup=False, orientation=\"TB\",", "= PythonOperator( task_id=\"pipeline_check_passed\", 
python_callable=pipeline_check_passed, provide_context=True, dag=dag, ) t_pipeline_check_skipped = PythonOperator(", "dagrun_timeout=timedelta(hours=240) ) as dag: t_pipeline_begin = PythonOperator( task_id=\"begin_pipeline\", python_callable=begin_pipeline, provide_context=True,", "= \"/opt/bitnami/airflow/airflow-data/scripts/download_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}}\" t_download_files = BashOperator( task_id='download_files', bash_command=download_files_cmd, dag=dag)", "DagPebbles() print(\"cleanup\") def notify(**kwargs): dp = DagPebbles() print(\"notify\") def end(**kwargs):", "= PythonOperator( task_id=\"end_pipeline\", python_callable=end_pipeline, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_notify =", "PythonOperator( task_id=\"end\", python_callable=end, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_pipeline_begin >> t_check_pipeline", "@author: anilkdegala \"\"\" import os from airflow import DAG from", "task_id=\"send_notifications\", python_callable=notify, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_cleanup = PythonOperator( task_id=\"cleanup\",", "trigger_rule=\"none_failed\", dag=dag, ) t_pipeline_begin >> t_check_pipeline t_check_pipeline >> t_pipeline_check_skipped >>", "1), \"is_active\": True, \"is_paused_upon_creation\": False, } def begin_pipeline(**kwargs): print(\"begin_pipeline:\") files", "output transfer_arguments_list.append(DATA_LOCATION + \"/\"+f['final_name']) transfer_arguments = \",\".join(transfer_arguments_list) print(\"final download_decrypt_arguments: \",download_decrypt_arguments)", "kwargs[\"ti\"].xcom_push(key=\"download_decrypt_arguments\", value=download_decrypt_arguments) kwargs[\"ti\"].xcom_push(key=\"transfer_arguments\", value=transfer_arguments) def pipeline_enable_check(**kwargs): dp = DagPebbles() if", "provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_notify = PythonOperator( task_id=\"send_notifications\", 
python_callable=notify, provide_context=True,", "t_pipeline_check_passed = PythonOperator( task_id=\"pipeline_check_passed\", python_callable=pipeline_check_passed, provide_context=True, dag=dag, ) t_pipeline_check_skipped =", "\"\"\" @author: anilkdegala \"\"\" import os from airflow import DAG", "= PythonOperator( task_id=\"pipeline_check_skipped\", python_callable=pipeline_check_skipped, provide_context=True, dag=dag, ) download_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/download_files.sh", "= BashOperator( task_id='decrypt_files', bash_command=decrypt_files_cmd, dag=dag) transfer_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/transfer_files_rds.pl \"+\"{{ ti.xcom_pull(key='transfer_arguments')}}", "t_decrypt_files = BashOperator( task_id='decrypt_files', bash_command=decrypt_files_cmd, dag=dag) transfer_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/transfer_files_rds.pl \"+\"{{", "download_decrypt_arguments + \" \" + output transfer_arguments_list.append(DATA_LOCATION + \"/\"+f['final_name']) transfer_arguments", "if dp.pipeline_enable_check('DATA_LOAD'): return \"pipeline_check_passed\" else: return \"pipeline_check_skipped\" def pipeline_check_passed(**kwargs): print(\"pipeline_check_passed:\")", "import DummyOperator default_args = { \"owner\": \"anilkdegala\", \"depends_on_past\": True, \"max_active_runs\":", "import date, timedelta, datetime from collections import OrderedDict from scripts.dag_pebbles", "t_pipeline_begin >> t_check_pipeline t_check_pipeline >> t_pipeline_check_skipped >> t_end_pipeline t_check_pipeline >>", "dag: t_pipeline_begin = PythonOperator( task_id=\"begin_pipeline\", python_callable=begin_pipeline, provide_context=True, dag=dag, ) t_check_pipeline", "transfer_arguments: \",transfer_arguments) kwargs[\"ti\"].xcom_push(key=\"download_decrypt_arguments\", value=download_decrypt_arguments) kwargs[\"ti\"].xcom_push(key=\"transfer_arguments\", value=transfer_arguments) def pipeline_enable_check(**kwargs): 
dp =", "catchup=False, orientation=\"TB\", tags=['Utils'], dagrun_timeout=timedelta(hours=240) ) as dag: t_pipeline_begin = PythonOperator(", "print(\"final download_decrypt_arguments: \",download_decrypt_arguments) print(\"final transfer_arguments: \",transfer_arguments) kwargs[\"ti\"].xcom_push(key=\"download_decrypt_arguments\", value=download_decrypt_arguments) kwargs[\"ti\"].xcom_push(key=\"transfer_arguments\", value=transfer_arguments)", "description=\"Download, Decrypt, Transfer files (Source: S3, Staging: EC2: Target: RDS", "\"/opt/bitnami/airflow/airflow-data/scripts/decrypt_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}} \" t_decrypt_files = BashOperator( task_id='decrypt_files', bash_command=decrypt_files_cmd, dag=dag)", "dag=dag) transfer_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/transfer_files_rds.pl \"+\"{{ ti.xcom_pull(key='transfer_arguments')}} \" t_transfer_files = BashOperator(", "* from airflow.operators.dummy_operator import DummyOperator default_args = { \"owner\": \"anilkdegala\",", "= kwargs['dag_run'].conf.get('files') download_decrypt_arguments = '' transfer_arguments_list = [] for f", "True, \"max_active_runs\": 1, \"start_date\": datetime(2015, 6, 1), \"is_active\": True, \"is_paused_upon_creation\":", "= \"/opt/bitnami/airflow/airflow-data/scripts/transfer_files_rds.pl \"+\"{{ ti.xcom_pull(key='transfer_arguments')}} \" t_transfer_files = BashOperator( task_id='transfer_files', bash_command=transfer_files_cmd,", "dag=dag, ) t_end = PythonOperator( task_id=\"end\", python_callable=end, provide_context=True, trigger_rule=\"none_failed\", dag=dag,", "import PythonOperator, BranchPythonOperator from datetime import date, timedelta, datetime from", "cleanup(**kwargs): dp = DagPebbles() print(\"cleanup\") def notify(**kwargs): dp = DagPebbles()", "airflow.operators.bash_operator import BashOperator from airflow.operators.python_operator import PythonOperator, BranchPythonOperator from datetime", "DummyOperator 
default_args = { \"owner\": \"anilkdegala\", \"depends_on_past\": True, \"max_active_runs\": 1,", "DAG( \"DOWNLOAD_DECRYPT_TRANSFER\", description=\"Download, Decrypt, Transfer files (Source: S3, Staging: EC2:", "PythonOperator, BranchPythonOperator from datetime import date, timedelta, datetime from collections", "def pipeline_check_skipped(**kwargs): print(\"pipeline_check_skipped:\") def cleanup(**kwargs): dp = DagPebbles() print(\"cleanup\") def", "dag=dag) t_end_pipeline = PythonOperator( task_id=\"end_pipeline\", python_callable=end_pipeline, provide_context=True, trigger_rule=\"none_failed\", dag=dag, )", ") t_notify = PythonOperator( task_id=\"send_notifications\", python_callable=notify, provide_context=True, trigger_rule=\"none_failed\", dag=dag, )", "from collections import OrderedDict from scripts.dag_pebbles import DagPebbles from airflow.configuration", "ti.xcom_pull(key='transfer_arguments')}} \" t_transfer_files = BashOperator( task_id='transfer_files', bash_command=transfer_files_cmd, dag=dag) t_end_pipeline =", "task_id='transfer_files', bash_command=transfer_files_cmd, dag=dag) t_end_pipeline = PythonOperator( task_id=\"end_pipeline\", python_callable=end_pipeline, provide_context=True, trigger_rule=\"none_failed\",", "\",transfer_arguments) kwargs[\"ti\"].xcom_push(key=\"download_decrypt_arguments\", value=download_decrypt_arguments) kwargs[\"ti\"].xcom_push(key=\"transfer_arguments\", value=transfer_arguments) def pipeline_enable_check(**kwargs): dp = DagPebbles()", "def cleanup(**kwargs): dp = DagPebbles() print(\"cleanup\") def notify(**kwargs): dp =", "= DagPebbles() if dp.pipeline_enable_check('DATA_LOAD'): return \"pipeline_check_passed\" else: return \"pipeline_check_skipped\" def", "files: print(\"download_decrypt_transfer_files: file: \", f['name'], ', location: ', f['path']) output", "pipeline_check_passed(**kwargs): print(\"pipeline_check_passed:\") def end_pipeline(**kwargs): print(\"end_pipeline:\") def 
pipeline_check_skipped(**kwargs): print(\"pipeline_check_skipped:\") def cleanup(**kwargs):", ") as dag: t_pipeline_begin = PythonOperator( task_id=\"begin_pipeline\", python_callable=begin_pipeline, provide_context=True, dag=dag,", "= f['name']+','+f['path']+','+f['final_name'] download_decrypt_arguments = download_decrypt_arguments + \" \" + output", "t_pipeline_begin = PythonOperator( task_id=\"begin_pipeline\", python_callable=begin_pipeline, provide_context=True, dag=dag, ) t_check_pipeline =", "EC2: Target: RDS Oracle)\", default_args=default_args, schedule_interval=None, catchup=False, orientation=\"TB\", tags=['Utils'], dagrun_timeout=timedelta(hours=240)", "<reponame>hms-dbmi/bch-pic-sure-airflow-dags \"\"\" @author: anilkdegala \"\"\" import os from airflow import", ") t_pipeline_check_passed = PythonOperator( task_id=\"pipeline_check_passed\", python_callable=pipeline_check_passed, provide_context=True, dag=dag, ) t_pipeline_check_skipped", "= BashOperator( task_id='transfer_files', bash_command=transfer_files_cmd, dag=dag) t_end_pipeline = PythonOperator( task_id=\"end_pipeline\", python_callable=end_pipeline,", "trigger_rule=\"none_failed\", dag=dag, ) t_cleanup = PythonOperator( task_id=\"cleanup\", python_callable=cleanup, provide_context=True, trigger_rule=\"none_failed\",", "t_decrypt_files >> t_transfer_files >> t_end_pipeline t_end_pipeline >> t_cleanup >> t_notify", "= BranchPythonOperator( task_id=\"check_pipeline\", python_callable=pipeline_enable_check, provide_context=True, dag=dag, ) t_pipeline_check_passed = PythonOperator(", "provide_context=True, dag=dag, ) t_pipeline_check_skipped = PythonOperator( task_id=\"pipeline_check_skipped\", python_callable=pipeline_check_skipped, provide_context=True, dag=dag,", ") download_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/download_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}}\" t_download_files = BashOperator( task_id='download_files',", "print(\"begin_pipeline:\") 
files = kwargs['dag_run'].conf.get('files') download_decrypt_arguments = '' transfer_arguments_list = []", "default_args = { \"owner\": \"anilkdegala\", \"depends_on_past\": True, \"max_active_runs\": 1, \"start_date\":", "dag=dag, ) t_pipeline_check_passed = PythonOperator( task_id=\"pipeline_check_passed\", python_callable=pipeline_check_passed, provide_context=True, dag=dag, )", "decrypt_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/decrypt_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}} \" t_decrypt_files = BashOperator( task_id='decrypt_files',", "\"DOWNLOAD_DECRYPT_TRANSFER\", description=\"Download, Decrypt, Transfer files (Source: S3, Staging: EC2: Target:", "t_pipeline_check_skipped >> t_end_pipeline t_check_pipeline >> t_pipeline_check_passed >> t_download_files >> t_decrypt_files", "print(\"end_pipeline:\") def pipeline_check_skipped(**kwargs): print(\"pipeline_check_skipped:\") def cleanup(**kwargs): dp = DagPebbles() print(\"cleanup\")", "dp = DagPebbles() print(\"end\") with DAG( \"DOWNLOAD_DECRYPT_TRANSFER\", description=\"Download, Decrypt, Transfer", "bash_command=download_files_cmd, dag=dag) decrypt_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/decrypt_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}} \" t_decrypt_files =", "else: return \"pipeline_check_skipped\" def pipeline_check_passed(**kwargs): print(\"pipeline_check_passed:\") def end_pipeline(**kwargs): print(\"end_pipeline:\") def", "t_end = PythonOperator( task_id=\"end\", python_callable=end, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_pipeline_begin", "scripts.dag_pebbles import DagPebbles from airflow.configuration import conf from scripts.configurations import", "task_id=\"check_pipeline\", python_callable=pipeline_enable_check, provide_context=True, dag=dag, ) t_pipeline_check_passed = PythonOperator( task_id=\"pipeline_check_passed\", python_callable=pipeline_check_passed,", "for f in files: 
print(\"download_decrypt_transfer_files: file: \", f['name'], ', location:", "BranchPythonOperator( task_id=\"check_pipeline\", python_callable=pipeline_enable_check, provide_context=True, dag=dag, ) t_pipeline_check_passed = PythonOperator( task_id=\"pipeline_check_passed\",", "provide_context=True, dag=dag, ) t_pipeline_check_passed = PythonOperator( task_id=\"pipeline_check_passed\", python_callable=pipeline_check_passed, provide_context=True, dag=dag,", "date, timedelta, datetime from collections import OrderedDict from scripts.dag_pebbles import", "f['name'], ', location: ', f['path']) output = f['name']+','+f['path']+','+f['final_name'] download_decrypt_arguments =", "import * from airflow.operators.dummy_operator import DummyOperator default_args = { \"owner\":", "location: ', f['path']) output = f['name']+','+f['path']+','+f['final_name'] download_decrypt_arguments = download_decrypt_arguments +", "transfer_arguments_list.append(DATA_LOCATION + \"/\"+f['final_name']) transfer_arguments = \",\".join(transfer_arguments_list) print(\"final download_decrypt_arguments: \",download_decrypt_arguments) print(\"final", "dag=dag, ) t_notify = PythonOperator( task_id=\"send_notifications\", python_callable=notify, provide_context=True, trigger_rule=\"none_failed\", dag=dag,", "schedule_interval=None, catchup=False, orientation=\"TB\", tags=['Utils'], dagrun_timeout=timedelta(hours=240) ) as dag: t_pipeline_begin =", "python_callable=pipeline_check_skipped, provide_context=True, dag=dag, ) download_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/download_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}}\" t_download_files", "provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_cleanup = PythonOperator( task_id=\"cleanup\", python_callable=cleanup, provide_context=True,", "python_callable=cleanup, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_end = PythonOperator( task_id=\"end\", python_callable=end,", "[] for f in 
files: print(\"download_decrypt_transfer_files: file: \", f['name'], ',", "True, \"is_paused_upon_creation\": False, } def begin_pipeline(**kwargs): print(\"begin_pipeline:\") files = kwargs['dag_run'].conf.get('files')", "kwargs['dag_run'].conf.get('files') download_decrypt_arguments = '' transfer_arguments_list = [] for f in", "\"pipeline_check_skipped\" def pipeline_check_passed(**kwargs): print(\"pipeline_check_passed:\") def end_pipeline(**kwargs): print(\"end_pipeline:\") def pipeline_check_skipped(**kwargs): print(\"pipeline_check_skipped:\")", "from airflow.operators.dummy_operator import DummyOperator default_args = { \"owner\": \"anilkdegala\", \"depends_on_past\":", "print(\"end\") with DAG( \"DOWNLOAD_DECRYPT_TRANSFER\", description=\"Download, Decrypt, Transfer files (Source: S3,", "python_callable=pipeline_check_passed, provide_context=True, dag=dag, ) t_pipeline_check_skipped = PythonOperator( task_id=\"pipeline_check_skipped\", python_callable=pipeline_check_skipped, provide_context=True,", "task_id='download_files', bash_command=download_files_cmd, dag=dag) decrypt_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/decrypt_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}} \" t_decrypt_files", "\"/opt/bitnami/airflow/airflow-data/scripts/transfer_files_rds.pl \"+\"{{ ti.xcom_pull(key='transfer_arguments')}} \" t_transfer_files = BashOperator( task_id='transfer_files', bash_command=transfer_files_cmd, dag=dag)", "\" t_transfer_files = BashOperator( task_id='transfer_files', bash_command=transfer_files_cmd, dag=dag) t_end_pipeline = PythonOperator(", ") t_pipeline_begin >> t_check_pipeline t_check_pipeline >> t_pipeline_check_skipped >> t_end_pipeline t_check_pipeline", "dag=dag, ) t_pipeline_check_skipped = PythonOperator( task_id=\"pipeline_check_skipped\", python_callable=pipeline_check_skipped, provide_context=True, dag=dag, )", "ti.xcom_pull(key='download_decrypt_arguments')}} \" t_decrypt_files = BashOperator( 
task_id='decrypt_files', bash_command=decrypt_files_cmd, dag=dag) transfer_files_cmd =", "download_decrypt_arguments: \",download_decrypt_arguments) print(\"final transfer_arguments: \",transfer_arguments) kwargs[\"ti\"].xcom_push(key=\"download_decrypt_arguments\", value=download_decrypt_arguments) kwargs[\"ti\"].xcom_push(key=\"transfer_arguments\", value=transfer_arguments) def", "Staging: EC2: Target: RDS Oracle)\", default_args=default_args, schedule_interval=None, catchup=False, orientation=\"TB\", tags=['Utils'],", "task_id='decrypt_files', bash_command=decrypt_files_cmd, dag=dag) transfer_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/transfer_files_rds.pl \"+\"{{ ti.xcom_pull(key='transfer_arguments')}} \" t_transfer_files", "import OrderedDict from scripts.dag_pebbles import DagPebbles from airflow.configuration import conf", "DagPebbles() print(\"end\") with DAG( \"DOWNLOAD_DECRYPT_TRANSFER\", description=\"Download, Decrypt, Transfer files (Source:", "Oracle)\", default_args=default_args, schedule_interval=None, catchup=False, orientation=\"TB\", tags=['Utils'], dagrun_timeout=timedelta(hours=240) ) as dag:", "BashOperator( task_id='download_files', bash_command=download_files_cmd, dag=dag) decrypt_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/decrypt_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}} \"", "f['name']+','+f['path']+','+f['final_name'] download_decrypt_arguments = download_decrypt_arguments + \" \" + output transfer_arguments_list.append(DATA_LOCATION", "= download_decrypt_arguments + \" \" + output transfer_arguments_list.append(DATA_LOCATION + \"/\"+f['final_name'])", "t_transfer_files = BashOperator( task_id='transfer_files', bash_command=transfer_files_cmd, dag=dag) t_end_pipeline = PythonOperator( task_id=\"end_pipeline\",", "BashOperator( task_id='decrypt_files', bash_command=decrypt_files_cmd, dag=dag) transfer_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/transfer_files_rds.pl \"+\"{{ 
ti.xcom_pull(key='transfer_arguments')}} \"", "Transfer files (Source: S3, Staging: EC2: Target: RDS Oracle)\", default_args=default_args,", "airflow.operators.python_operator import PythonOperator, BranchPythonOperator from datetime import date, timedelta, datetime", "dag=dag) decrypt_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/decrypt_files.sh \"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}} \" t_decrypt_files = BashOperator(", "\"+\"{{ ti.xcom_pull(key='transfer_arguments')}} \" t_transfer_files = BashOperator( task_id='transfer_files', bash_command=transfer_files_cmd, dag=dag) t_end_pipeline", "airflow import DAG from airflow.operators.bash_operator import BashOperator from airflow.operators.python_operator import", "f in files: print(\"download_decrypt_transfer_files: file: \", f['name'], ', location: ',", "print(\"pipeline_check_passed:\") def end_pipeline(**kwargs): print(\"end_pipeline:\") def pipeline_check_skipped(**kwargs): print(\"pipeline_check_skipped:\") def cleanup(**kwargs): dp", "DagPebbles() if dp.pipeline_enable_check('DATA_LOAD'): return \"pipeline_check_passed\" else: return \"pipeline_check_skipped\" def pipeline_check_passed(**kwargs):", "t_end_pipeline = PythonOperator( task_id=\"end_pipeline\", python_callable=end_pipeline, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_notify", "from datetime import date, timedelta, datetime from collections import OrderedDict", "\"/\"+f['final_name']) transfer_arguments = \",\".join(transfer_arguments_list) print(\"final download_decrypt_arguments: \",download_decrypt_arguments) print(\"final transfer_arguments: \",transfer_arguments)", "tags=['Utils'], dagrun_timeout=timedelta(hours=240) ) as dag: t_pipeline_begin = PythonOperator( task_id=\"begin_pipeline\", python_callable=begin_pipeline,", "import conf from scripts.configurations import * from airflow.operators.dummy_operator import DummyOperator", "} def begin_pipeline(**kwargs): print(\"begin_pipeline:\") files 
= kwargs['dag_run'].conf.get('files') download_decrypt_arguments = ''", "dp = DagPebbles() if dp.pipeline_enable_check('DATA_LOAD'): return \"pipeline_check_passed\" else: return \"pipeline_check_skipped\"", "print(\"download_decrypt_transfer_files: file: \", f['name'], ', location: ', f['path']) output =", "+ \" \" + output transfer_arguments_list.append(DATA_LOCATION + \"/\"+f['final_name']) transfer_arguments =", "= '' transfer_arguments_list = [] for f in files: print(\"download_decrypt_transfer_files:", "output = f['name']+','+f['path']+','+f['final_name'] download_decrypt_arguments = download_decrypt_arguments + \" \" +", "default_args=default_args, schedule_interval=None, catchup=False, orientation=\"TB\", tags=['Utils'], dagrun_timeout=timedelta(hours=240) ) as dag: t_pipeline_begin", "t_download_files = BashOperator( task_id='download_files', bash_command=download_files_cmd, dag=dag) decrypt_files_cmd = \"/opt/bitnami/airflow/airflow-data/scripts/decrypt_files.sh \"+\"{{", "t_check_pipeline >> t_pipeline_check_passed >> t_download_files >> t_decrypt_files >> t_transfer_files >>", "scripts.configurations import * from airflow.operators.dummy_operator import DummyOperator default_args = {", "orientation=\"TB\", tags=['Utils'], dagrun_timeout=timedelta(hours=240) ) as dag: t_pipeline_begin = PythonOperator( task_id=\"begin_pipeline\",", "print(\"cleanup\") def notify(**kwargs): dp = DagPebbles() print(\"notify\") def end(**kwargs): dp", "from scripts.configurations import * from airflow.operators.dummy_operator import DummyOperator default_args =", "\"\"\" import os from airflow import DAG from airflow.operators.bash_operator import", ">> t_end_pipeline t_check_pipeline >> t_pipeline_check_passed >> t_download_files >> t_decrypt_files >>", "t_transfer_files >> t_end_pipeline t_end_pipeline >> t_cleanup >> t_notify >> t_end", "dag=dag, ) t_pipeline_begin >> t_check_pipeline t_check_pipeline >> t_pipeline_check_skipped >> t_end_pipeline", "import BashOperator 
from airflow.operators.python_operator import PythonOperator, BranchPythonOperator from datetime import", "files = kwargs['dag_run'].conf.get('files') download_decrypt_arguments = '' transfer_arguments_list = [] for", "Target: RDS Oracle)\", default_args=default_args, schedule_interval=None, catchup=False, orientation=\"TB\", tags=['Utils'], dagrun_timeout=timedelta(hours=240) )", "t_pipeline_check_skipped = PythonOperator( task_id=\"pipeline_check_skipped\", python_callable=pipeline_check_skipped, provide_context=True, dag=dag, ) download_files_cmd =", "\"+\"{{ ti.xcom_pull(key='download_decrypt_arguments')}} \" t_decrypt_files = BashOperator( task_id='decrypt_files', bash_command=decrypt_files_cmd, dag=dag) transfer_files_cmd", "t_notify = PythonOperator( task_id=\"send_notifications\", python_callable=notify, provide_context=True, trigger_rule=\"none_failed\", dag=dag, ) t_cleanup" ]
[ "a policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.check_policy_association( policy_id, endpoint_id=endpoint_id)", "to handle wrapping this policy entity # ourselves. self._add_self_referential_link(context, ref)", "service_id, region_id): \"\"\"Check an association between a policy and region+service.\"\"\"", "association between a policy and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.check_policy_association(", "check_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Check an association between a policy", "delete_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id): \"\"\"Delete an association between", "def check_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id): \"\"\"Check an association", "this policy entity # ourselves. self._add_self_referential_link(context, ref) return {'policy': ref}", "under the License. from keystone.common import controller from keystone.common import", "endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.check_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def delete_policy_association_for_endpoint(self, context,", "association between a policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.delete_policy_association(", "escape. 
@classmethod def filter_endpoint(cls, ref): if 'legacy_endpoint_id' in ref: ref.pop('legacy_endpoint_id')", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "self.endpoint_policy_api.create_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def check_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Check", "between a policy and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.check_policy_association( policy_id,", "policy_id, service_id=service_id) @controller.protected() def delete_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Delete an", "self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def get_policy_for_endpoint(self, context,", "specific language governing permissions and limitations # under the License.", "wrap_member(cls, context, ref): ref = cls.filter_endpoint(ref) return super(EndpointPolicyV3Controller, cls).wrap_member(context, ref)", "# not use this file except in compliance with the", "def list_endpoints_for_policy(self, context, policy_id): \"\"\"List endpoints with the effective association", "resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_region( payload['resource_info']) def _on_policy_delete(self, service, resource_type, operation,", "ref): ref = cls.filter_endpoint(ref) return super(EndpointPolicyV3Controller, cls).wrap_member(context, ref) @controller.protected() def", "def check_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Check an association between a", "association between a policy and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) 
self.endpoint_policy_api.create_policy_association(", "'catalog_api', 'endpoint_policy_api') class EndpointPolicyV3Controller(controller.V3Controller): collection_name = 'endpoints' member_name = 'endpoint'", "endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.create_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def check_policy_association_for_endpoint(self, context,", "in compliance with the License. You may obtain # a", "Copyright 2014 IBM Corp. # # Licensed under the Apache", "import controller from keystone.common import dependency from keystone import notifications", "region_id): \"\"\"Delete an association between a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id)", "You may obtain # a copy of the License at", "and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id) @controller.protected() def", "endpoint.\"\"\" self.catalog_api.get_endpoint(endpoint_id) ref = self.endpoint_policy_api.get_policy_for_endpoint(endpoint_id) # NOTE(henry-nash): since the collection", "ref} # NOTE(henry-nash): As in the catalog controller, we must", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "since the collection and member for this class is #", "ref) return {'policy': ref} # NOTE(henry-nash): As in the catalog", "an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.check_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def delete_policy_association_for_endpoint(self,", "collection and member for this class is # set to", "\"\"\"List endpoints with the effective association to a policy.\"\"\" self.policy_api.get_policy(policy_id)", "self, context, policy_id, service_id, region_id): 
\"\"\"Delete an association between a", "'legacy_endpoint_id' in ref: ref.pop('legacy_endpoint_id') return ref @classmethod def wrap_member(cls, context,", "under the License is distributed on an \"AS IS\" BASIS,", "def _on_region_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_region( payload['resource_info']) def _on_policy_delete(self,", "= 'endpoints' member_name = 'endpoint' def __init__(self): super(EndpointPolicyV3Controller, self).__init__() notifications.register_event_callback(", "between a policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.delete_policy_association( policy_id,", "@controller.protected() def list_endpoints_for_policy(self, context, policy_id): \"\"\"List endpoints with the effective", "policy_id, endpoint_id): \"\"\"Create an association between a policy and an", "'service', self._on_service_delete) notifications.register_event_callback( 'deleted', 'region', self._on_region_delete) notifications.register_event_callback( 'deleted', 'policy', self._on_policy_delete)", "and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected()", "effective policy for an endpoint.\"\"\" self.catalog_api.get_endpoint(endpoint_id) ref = self.endpoint_policy_api.get_policy_for_endpoint(endpoint_id) #", "import dependency from keystone import notifications @dependency.requires('policy_api', 'catalog_api', 'endpoint_policy_api') class", "the effective policy for an endpoint.\"\"\" self.catalog_api.get_endpoint(endpoint_id) ref = self.endpoint_policy_api.get_policy_for_endpoint(endpoint_id)", "keystone.common import dependency from keystone import notifications @dependency.requires('policy_api', 'catalog_api', 
'endpoint_policy_api')", "service_id=service_id, region_id=region_id) @controller.protected() def delete_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id):", "this file except in compliance with the License. You may", "language governing permissions and limitations # under the License. from", "self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def check_policy_association_for_region_and_service(", "catalog controller, we must ensure that the # legacy_endpoint_id does", "def create_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id): \"\"\"Create an association", "policy_id, service_id, region_id): \"\"\"Delete an association between a policy and", "= self.endpoint_policy_api.get_policy_for_endpoint(endpoint_id) # NOTE(henry-nash): since the collection and member for", "from keystone.common import dependency from keystone import notifications @dependency.requires('policy_api', 'catalog_api',", "software # distributed under the License is distributed on an", "(the \"License\"); you may # not use this file except", "operation, payload): self.endpoint_policy_api.delete_association_by_service( payload['resource_info']) def _on_region_delete(self, service, resource_type, operation, payload):", "self.catalog_api.get_service(service_id) self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id) @controller.protected() def create_policy_association_for_region_and_service( self, context, policy_id,", "not escape. 
@classmethod def filter_endpoint(cls, ref): if 'legacy_endpoint_id' in ref:", "ref.pop('legacy_endpoint_id') return ref @classmethod def wrap_member(cls, context, ref): ref =", "payload): self.endpoint_policy_api.delete_association_by_region( payload['resource_info']) def _on_policy_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_policy(", "Corp. # # Licensed under the Apache License, Version 2.0", "an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.delete_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def create_policy_association_for_service(self,", "endpoint_id=endpoint_id) @controller.protected() def delete_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Delete an association", "file except in compliance with the License. You may obtain", "OR CONDITIONS OF ANY KIND, either express or implied. 
See", "the specific language governing permissions and limitations # under the", "policy_id, service_id, region_id): \"\"\"Create an association between a policy and", "a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id,", "self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def check_policy_association_for_region_and_service( self, context, policy_id,", "service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_service( payload['resource_info']) def _on_region_delete(self, service, resource_type,", "under the Apache License, Version 2.0 (the \"License\"); you may", "policy_id, service_id, region_id): \"\"\"Check an association between a policy and", "self.catalog_api.get_region(region_id) self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def delete_policy_association_for_region_and_service( self, context,", "keystone.common import controller from keystone.common import dependency from keystone import", "context, policy_id, service_id): \"\"\"Check an association between a policy and", "if 'legacy_endpoint_id' in ref: ref.pop('legacy_endpoint_id') return ref @classmethod def wrap_member(cls,", "self, context, policy_id, service_id, region_id): \"\"\"Create an association between a", "endpoint_id): \"\"\"Check an association between a policy and an endpoint.\"\"\"", "association between a policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.create_policy_association(", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", 
"self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id) @controller.protected() def check_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Check", "'policy', self._on_policy_delete) def _on_endpoint_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_endpoint( payload['resource_info'])", "delete_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Delete an association between a policy", "self.catalog_api.get_region(region_id) self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def get_policy_for_endpoint(self, context, endpoint_id):", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "'endpoint' def __init__(self): super(EndpointPolicyV3Controller, self).__init__() notifications.register_event_callback( 'deleted', 'endpoint', self._on_endpoint_delete) notifications.register_event_callback(", "to in writing, software # distributed under the License is", "\"\"\"Create an association between a policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id)", "return super(EndpointPolicyV3Controller, cls).wrap_member(context, ref) @controller.protected() def list_endpoints_for_policy(self, context, policy_id): \"\"\"List", "controller from keystone.common import dependency from keystone import notifications @dependency.requires('policy_api',", "def _on_endpoint_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_endpoint( payload['resource_info']) def _on_service_delete(self,", "endpoint_id): \"\"\"Delete an association between a policy and an endpoint.\"\"\"", "or agreed to in writing, software # distributed under the", "self.catalog_api.get_service(service_id) self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id) 
@controller.protected() def delete_policy_association_for_service(self, context, policy_id, service_id):", "self, context, policy_id, service_id, region_id): \"\"\"Check an association between a", "ref) @controller.protected() def list_endpoints_for_policy(self, context, policy_id): \"\"\"List endpoints with the", "required by applicable law or agreed to in writing, software", "keystone import notifications @dependency.requires('policy_api', 'catalog_api', 'endpoint_policy_api') class EndpointPolicyV3Controller(controller.V3Controller): collection_name =", "cls.filter_endpoint(ref) return super(EndpointPolicyV3Controller, cls).wrap_member(context, ref) @controller.protected() def list_endpoints_for_policy(self, context, policy_id):", "policy_id, service_id): \"\"\"Create an association between a policy and a", "notifications.register_event_callback( 'deleted', 'service', self._on_service_delete) notifications.register_event_callback( 'deleted', 'region', self._on_region_delete) notifications.register_event_callback( 'deleted',", "check_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Check an association between a policy", "association between a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.delete_policy_association(", "region_id): \"\"\"Check an association between a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id)", "self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id) @controller.protected() def create_policy_association_for_region_and_service( self, context, policy_id, service_id,", "an association between a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id)", "Apache License, Version 2.0 (the \"License\"); you may # not", "service, resource_type, 
operation, payload): self.endpoint_policy_api.delete_association_by_policy( payload['resource_info']) @controller.protected() def create_policy_association_for_endpoint(self, context,", "self.endpoint_policy_api.get_policy_for_endpoint(endpoint_id) # NOTE(henry-nash): since the collection and member for this", "policy_id, endpoint_id=endpoint_id) @controller.protected() def check_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Check an", "agreed to in writing, software # distributed under the License", "super(EndpointPolicyV3Controller, cls).wrap_member(context, ref) @controller.protected() def list_endpoints_for_policy(self, context, policy_id): \"\"\"List endpoints", "self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.create_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def check_policy_association_for_endpoint(self, context, policy_id, endpoint_id):", "= cls.filter_endpoint(ref) return super(EndpointPolicyV3Controller, cls).wrap_member(context, ref) @controller.protected() def list_endpoints_for_policy(self, context,", "distributed under the License is distributed on an \"AS IS\"", "def filter_endpoint(cls, ref): if 'legacy_endpoint_id' in ref: ref.pop('legacy_endpoint_id') return ref", "policy and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id) @controller.protected()", "delete_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Delete an association between a policy", "and member for this class is # set to endpoints,", "service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id) @controller.protected() def check_policy_association_for_service(self, context,", "License, Version 2.0 (the \"License\"); you may 
# not use", "CONDITIONS OF ANY KIND, either express or implied. See the", "ref): if 'legacy_endpoint_id' in ref: ref.pop('legacy_endpoint_id') return ref @classmethod def", "notifications.register_event_callback( 'deleted', 'endpoint', self._on_endpoint_delete) notifications.register_event_callback( 'deleted', 'service', self._on_service_delete) notifications.register_event_callback( 'deleted',", "service_id, region_id): \"\"\"Delete an association between a policy and region+service.\"\"\"", "not use this file except in compliance with the License.", "ensure that the # legacy_endpoint_id does not escape. @classmethod def", "@controller.protected() def create_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id): \"\"\"Create an", "region_id=region_id) @controller.protected() def delete_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id): \"\"\"Delete", "policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.create_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected()", "writing, software # distributed under the License is distributed on", "endpoint_id): \"\"\"Create an association between a policy and an endpoint.\"\"\"", "notifications @dependency.requires('policy_api', 'catalog_api', 'endpoint_policy_api') class EndpointPolicyV3Controller(controller.V3Controller): collection_name = 'endpoints' member_name", "region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def", "and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.delete_policy_association( policy_id, endpoint_id=endpoint_id) 
@controller.protected() def", "context, policy_id): \"\"\"List endpoints with the effective association to a", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "@controller.protected() def check_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id): \"\"\"Check an", "the License. You may obtain # a copy of the", "and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id) @controller.protected() def", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF", "use this file except in compliance with the License. You", "and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected()", "service_id=service_id, region_id=region_id) @controller.protected() def get_policy_for_endpoint(self, context, endpoint_id): \"\"\"Get the effective", "dependency from keystone import notifications @dependency.requires('policy_api', 'catalog_api', 'endpoint_policy_api') class EndpointPolicyV3Controller(controller.V3Controller):", "an association between a policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id)", "for this class is # set to endpoints, we have", "import notifications @dependency.requires('policy_api', 'catalog_api', 'endpoint_policy_api') class EndpointPolicyV3Controller(controller.V3Controller): collection_name = 'endpoints'", "service_id=service_id) @controller.protected() def delete_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Delete an association", "'deleted', 'endpoint', self._on_endpoint_delete) notifications.register_event_callback( 'deleted', 'service', self._on_service_delete) 
notifications.register_event_callback( 'deleted', 'region',", "service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_endpoint( payload['resource_info']) def _on_service_delete(self, service, resource_type,", "a policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.create_policy_association( policy_id, endpoint_id=endpoint_id)", "context, endpoint_id): \"\"\"Get the effective policy for an endpoint.\"\"\" self.catalog_api.get_endpoint(endpoint_id)", "region_id=region_id) @controller.protected() def check_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id): \"\"\"Check", "and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.create_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def", "\"\"\"Check an association between a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id)", "@classmethod def wrap_member(cls, context, ref): ref = cls.filter_endpoint(ref) return super(EndpointPolicyV3Controller,", "association between a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.create_policy_association(", "for an endpoint.\"\"\" self.catalog_api.get_endpoint(endpoint_id) ref = self.endpoint_policy_api.get_policy_for_endpoint(endpoint_id) # NOTE(henry-nash): since", "def wrap_member(cls, context, ref): ref = cls.filter_endpoint(ref) return super(EndpointPolicyV3Controller, cls).wrap_member(context,", "'endpoints' member_name = 'endpoint' def __init__(self): super(EndpointPolicyV3Controller, self).__init__() notifications.register_event_callback( 'deleted',", "\"\"\"Get the effective policy for an endpoint.\"\"\" 
self.catalog_api.get_endpoint(endpoint_id) ref =", "\"\"\"Check an association between a policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id)", "@controller.protected() def create_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Create an association between", "operation, payload): self.endpoint_policy_api.delete_association_by_region( payload['resource_info']) def _on_policy_delete(self, service, resource_type, operation, payload):", "License is distributed on an \"AS IS\" BASIS, WITHOUT #", "KIND, either express or implied. See the # License for", "context, policy_id, endpoint_id): \"\"\"Check an association between a policy and", "policy_id, endpoint_id=endpoint_id) @controller.protected() def create_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Create an", "\"License\"); you may # not use this file except in", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "@controller.protected() def delete_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Delete an association between", "create_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id): \"\"\"Create an association between", "effective association to a policy.\"\"\" self.policy_api.get_policy(policy_id) refs = self.endpoint_policy_api.list_endpoints_for_policy(policy_id) return", "express or implied. See the # License for the specific", "self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def get_policy_for_endpoint(self, context, endpoint_id): \"\"\"Get", "# set to endpoints, we have to handle wrapping this", "# under the License. 
from keystone.common import controller from keystone.common", "the Apache License, Version 2.0 (the \"License\"); you may #", "self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id) @controller.protected() def create_policy_association_for_region_and_service( self, context,", "policy_id, endpoint_id): \"\"\"Check an association between a policy and an", "payload['resource_info']) def _on_policy_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_policy( payload['resource_info']) @controller.protected()", "association between a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.check_policy_association(", "See the # License for the specific language governing permissions", "notifications.register_event_callback( 'deleted', 'policy', self._on_policy_delete) def _on_endpoint_delete(self, service, resource_type, operation, payload):", "get_policy_for_endpoint(self, context, endpoint_id): \"\"\"Get the effective policy for an endpoint.\"\"\"", "2014 IBM Corp. 
# # Licensed under the Apache License,", "operation, payload): self.endpoint_policy_api.delete_association_by_endpoint( payload['resource_info']) def _on_service_delete(self, service, resource_type, operation, payload):", "context, policy_id, endpoint_id): \"\"\"Delete an association between a policy and", "@controller.protected() def create_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Create an association between", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "context, policy_id, service_id): \"\"\"Delete an association between a policy and", "self.endpoint_policy_api.delete_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def create_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Create", "policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.check_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected()", "service_id): \"\"\"Check an association between a policy and a service.\"\"\"", "def delete_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Delete an association between a", "ourselves. 
self._add_self_referential_link(context, ref) return {'policy': ref} # NOTE(henry-nash): As in", "NOTE(henry-nash): since the collection and member for this class is", "law or agreed to in writing, software # distributed under", "@classmethod def filter_endpoint(cls, ref): if 'legacy_endpoint_id' in ref: ref.pop('legacy_endpoint_id') return", "@controller.protected() def delete_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id): \"\"\"Delete an", "payload['resource_info']) @controller.protected() def create_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Create an association", "filter_endpoint(cls, ref): if 'legacy_endpoint_id' in ref: ref.pop('legacy_endpoint_id') return ref @classmethod", "region_id): \"\"\"Create an association between a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id)", "self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def delete_policy_association_for_region_and_service( self, context, policy_id,", "implied. See the # License for the specific language governing", "self._add_self_referential_link(context, ref) return {'policy': ref} # NOTE(henry-nash): As in the", "region_id=region_id) @controller.protected() def get_policy_for_endpoint(self, context, endpoint_id): \"\"\"Get the effective policy", "'endpoint', self._on_endpoint_delete) notifications.register_event_callback( 'deleted', 'service', self._on_service_delete) notifications.register_event_callback( 'deleted', 'region', self._on_region_delete)", "with the effective association to a policy.\"\"\" self.policy_api.get_policy(policy_id) refs =", "a policy and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id)", "governing permissions and limitations # under the License. 
from keystone.common", "limitations # under the License. from keystone.common import controller from", "service_id): \"\"\"Create an association between a policy and a service.\"\"\"", "@dependency.requires('policy_api', 'catalog_api', 'endpoint_policy_api') class EndpointPolicyV3Controller(controller.V3Controller): collection_name = 'endpoints' member_name =", "the effective association to a policy.\"\"\" self.policy_api.get_policy(policy_id) refs = self.endpoint_policy_api.list_endpoints_for_policy(policy_id)", "payload['resource_info']) def _on_service_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_service( payload['resource_info']) def", "ref: ref.pop('legacy_endpoint_id') return ref @classmethod def wrap_member(cls, context, ref): ref", "endpoint_id): \"\"\"Get the effective policy for an endpoint.\"\"\" self.catalog_api.get_endpoint(endpoint_id) ref", "policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id, region_id=region_id)", "policy_id, service_id=service_id, region_id=region_id) @controller.protected() def get_policy_for_endpoint(self, context, endpoint_id): \"\"\"Get the", "resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_policy( payload['resource_info']) @controller.protected() def create_policy_association_for_endpoint(self, context, policy_id,", "self.endpoint_policy_api.delete_association_by_region( payload['resource_info']) def _on_policy_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_policy( payload['resource_info'])", "self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id) @controller.protected() def 
delete_policy_association_for_service(self, context, policy_id,", "self._on_endpoint_delete) notifications.register_event_callback( 'deleted', 'service', self._on_service_delete) notifications.register_event_callback( 'deleted', 'region', self._on_region_delete) notifications.register_event_callback(", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "def __init__(self): super(EndpointPolicyV3Controller, self).__init__() notifications.register_event_callback( 'deleted', 'endpoint', self._on_endpoint_delete) notifications.register_event_callback( 'deleted',", "# # Licensed under the Apache License, Version 2.0 (the", "self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def delete_policy_association_for_region_and_service(", "context, ref): ref = cls.filter_endpoint(ref) return super(EndpointPolicyV3Controller, cls).wrap_member(context, ref) @controller.protected()", "we have to handle wrapping this policy entity # ourselves.", "class is # set to endpoints, we have to handle", "policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id, region_id=region_id)", "region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def", "obtain # a copy of the License at # #", "service_id, region_id): \"\"\"Create an association between a policy and region+service.\"\"\"", "endpoints with the effective 
association to a policy.\"\"\" self.policy_api.get_policy(policy_id) refs", "context, policy_id, service_id): \"\"\"Create an association between a policy and", "list_endpoints_for_policy(self, context, policy_id): \"\"\"List endpoints with the effective association to", "\"\"\"Create an association between a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id)", "Version 2.0 (the \"License\"); you may # not use this", "def delete_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id): \"\"\"Delete an association", "between a policy and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.delete_policy_association( policy_id,", "that the # legacy_endpoint_id does not escape. @classmethod def filter_endpoint(cls,", "operation, payload): self.endpoint_policy_api.delete_association_by_policy( payload['resource_info']) @controller.protected() def create_policy_association_for_endpoint(self, context, policy_id, endpoint_id):", "have to handle wrapping this policy entity # ourselves. 
self._add_self_referential_link(context,", "from keystone.common import controller from keystone.common import dependency from keystone", "License for the specific language governing permissions and limitations #", "ref = cls.filter_endpoint(ref) return super(EndpointPolicyV3Controller, cls).wrap_member(context, ref) @controller.protected() def list_endpoints_for_policy(self,", "'deleted', 'region', self._on_region_delete) notifications.register_event_callback( 'deleted', 'policy', self._on_policy_delete) def _on_endpoint_delete(self, service,", "def create_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Create an association between a", "and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected()", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS", "policy entity # ourselves. self._add_self_referential_link(context, ref) return {'policy': ref} #", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "a policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.delete_policy_association( policy_id, endpoint_id=endpoint_id)", "the catalog controller, we must ensure that the # legacy_endpoint_id", "between a policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.check_policy_association( policy_id,", "policy_id, endpoint_id): \"\"\"Delete an association between a policy and an", "handle wrapping this policy entity # ourselves. 
self._add_self_referential_link(context, ref) return", "collection_name = 'endpoints' member_name = 'endpoint' def __init__(self): super(EndpointPolicyV3Controller, self).__init__()", "and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id) @controller.protected() def", "between a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.delete_policy_association( policy_id,", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id,", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "to endpoints, we have to handle wrapping this policy entity", "policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id, region_id=region_id)", "a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id) @controller.protected() def create_policy_association_for_region_and_service(", "self).__init__() notifications.register_event_callback( 'deleted', 'endpoint', self._on_endpoint_delete) notifications.register_event_callback( 'deleted', 'service', self._on_service_delete) notifications.register_event_callback(", "compliance with the License. 
You may obtain # a copy", "context, policy_id, service_id, region_id): \"\"\"Check an association between a policy", "self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def get_policy_for_endpoint(self,", "policy_id, service_id=service_id, region_id=region_id) @controller.protected() def check_policy_association_for_region_and_service( self, context, policy_id, service_id,", "is # set to endpoints, we have to handle wrapping", "must ensure that the # legacy_endpoint_id does not escape. @classmethod", "self.endpoint_policy_api.check_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def delete_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Delete", "ref = self.endpoint_policy_api.get_policy_for_endpoint(endpoint_id) # NOTE(henry-nash): since the collection and member", "super(EndpointPolicyV3Controller, self).__init__() notifications.register_event_callback( 'deleted', 'endpoint', self._on_endpoint_delete) notifications.register_event_callback( 'deleted', 'service', self._on_service_delete)", "_on_service_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_service( payload['resource_info']) def _on_region_delete(self, service,", "policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.delete_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected()", "the # License for the specific language governing permissions and", "self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.check_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def delete_policy_association_for_endpoint(self, 
context, policy_id,", "# # Unless required by applicable law or agreed to", "IBM Corp. # # Licensed under the Apache License, Version", "permissions and limitations # under the License. from keystone.common import", "\"\"\"Delete an association between a policy and a service.\"\"\" self.policy_api.get_policy(policy_id)", "a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id) @controller.protected() def check_policy_association_for_service(self,", "self._on_region_delete) notifications.register_event_callback( 'deleted', 'policy', self._on_policy_delete) def _on_endpoint_delete(self, service, resource_type, operation,", "a policy and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id)", "notifications.register_event_callback( 'deleted', 'region', self._on_region_delete) notifications.register_event_callback( 'deleted', 'policy', self._on_policy_delete) def _on_endpoint_delete(self,", "check_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id): \"\"\"Check an association between", "def _on_policy_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_policy( payload['resource_info']) @controller.protected() def", "an association between a policy and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id)", "@controller.protected() def check_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Check an association between", "_on_policy_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_policy( payload['resource_info']) @controller.protected() def create_policy_association_for_endpoint(self,", 
"service_id=service_id) @controller.protected() def create_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id): \"\"\"Create", "create_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Create an association between a policy", "we must ensure that the # legacy_endpoint_id does not escape.", "2.0 (the \"License\"); you may # not use this file", "self.catalog_api.get_service(service_id) self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id) @controller.protected() def check_policy_association_for_service(self, context, policy_id, service_id):", "# legacy_endpoint_id does not escape. @classmethod def filter_endpoint(cls, ref): if", "payload['resource_info']) def _on_region_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_region( payload['resource_info']) def", "the License. from keystone.common import controller from keystone.common import dependency", "endpoints, we have to handle wrapping this policy entity #", "policy and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id) @controller.protected()", "by applicable law or agreed to in writing, software #", "between a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.check_policy_association( policy_id,", "policy_id, service_id=service_id) @controller.protected() def create_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id):", "between a policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.create_policy_association( policy_id,", "# Copyright 2014 IBM Corp. 
# # Licensed under the", "def get_policy_for_endpoint(self, context, endpoint_id): \"\"\"Get the effective policy for an", "self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.check_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def delete_policy_association_for_endpoint(self, context, policy_id, endpoint_id):", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "def _on_service_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_service( payload['resource_info']) def _on_region_delete(self,", "policy_id): \"\"\"List endpoints with the effective association to a policy.\"\"\"", "service_id): \"\"\"Delete an association between a policy and a service.\"\"\"", "\"\"\"Delete an association between a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id)", "return ref @classmethod def wrap_member(cls, context, ref): ref = cls.filter_endpoint(ref)", "policy_id, service_id): \"\"\"Check an association between a policy and a", "does not escape. @classmethod def filter_endpoint(cls, ref): if 'legacy_endpoint_id' in", "self.endpoint_policy_api.delete_association_by_endpoint( payload['resource_info']) def _on_service_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_service( payload['resource_info'])", "the # legacy_endpoint_id does not escape. 
@classmethod def filter_endpoint(cls, ref):", "may obtain # a copy of the License at #", "payload): self.endpoint_policy_api.delete_association_by_service( payload['resource_info']) def _on_region_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_region(", "Unless required by applicable law or agreed to in writing,", "self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id) @controller.protected() def check_policy_association_for_service(self, context, policy_id,", "def check_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Check an association between a", "# ourselves. self._add_self_referential_link(context, ref) return {'policy': ref} # NOTE(henry-nash): As", "License. from keystone.common import controller from keystone.common import dependency from", "applicable law or agreed to in writing, software # distributed", "OF ANY KIND, either express or implied. 
See the #", "self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.create_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def check_policy_association_for_endpoint(self, context, policy_id,", "endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.delete_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def create_policy_association_for_service(self, context,", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "in writing, software # distributed under the License is distributed", "'region', self._on_region_delete) notifications.register_event_callback( 'deleted', 'policy', self._on_policy_delete) def _on_endpoint_delete(self, service, resource_type,", "a policy and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id)", "cls).wrap_member(context, ref) @controller.protected() def list_endpoints_for_policy(self, context, policy_id): \"\"\"List endpoints with", "service_id=service_id) @controller.protected() def check_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Check an association", "between a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.create_policy_association( policy_id,", "member for this class is # set to endpoints, we", "resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_service( payload['resource_info']) def _on_region_delete(self, service, resource_type, operation,", "a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id) 
@controller.protected() def delete_policy_association_for_service(self,", "service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id) @controller.protected() def delete_policy_association_for_service(self, context,", "self.endpoint_policy_api.delete_association_by_policy( payload['resource_info']) @controller.protected() def create_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Create an", "region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def", "an endpoint.\"\"\" self.catalog_api.get_endpoint(endpoint_id) ref = self.endpoint_policy_api.get_policy_for_endpoint(endpoint_id) # NOTE(henry-nash): since the", "service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_region( payload['resource_info']) def _on_policy_delete(self, service, resource_type,", "__init__(self): super(EndpointPolicyV3Controller, self).__init__() notifications.register_event_callback( 'deleted', 'endpoint', self._on_endpoint_delete) notifications.register_event_callback( 'deleted', 'service',", "a policy and region+service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id,", "return {'policy': ref} # NOTE(henry-nash): As in the catalog controller,", "@controller.protected() def get_policy_for_endpoint(self, context, endpoint_id): \"\"\"Get the effective policy for", "ref @classmethod def wrap_member(cls, context, ref): ref = cls.filter_endpoint(ref) return", "set to endpoints, we have to handle wrapping this policy", "wrapping this policy 
entity # ourselves. self._add_self_referential_link(context, ref) return {'policy':", "_on_region_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_region( payload['resource_info']) def _on_policy_delete(self, service,", "either express or implied. See the # License for the", "this class is # set to endpoints, we have to", "\"\"\"Create an association between a policy and a service.\"\"\" self.policy_api.get_policy(policy_id)", "def delete_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Delete an association between a", "legacy_endpoint_id does not escape. @classmethod def filter_endpoint(cls, ref): if 'legacy_endpoint_id'", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "may # not use this file except in compliance with", "'endpoint_policy_api') class EndpointPolicyV3Controller(controller.V3Controller): collection_name = 'endpoints' member_name = 'endpoint' def", "self._on_policy_delete) def _on_endpoint_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_endpoint( payload['resource_info']) def", "NOTE(henry-nash): As in the catalog controller, we must ensure that", "# NOTE(henry-nash): As in the catalog controller, we must ensure", "context, policy_id, endpoint_id): \"\"\"Create an association between a policy and", "# License for the specific language governing permissions and limitations", "with the License. 
You may obtain # a copy of", "= 'endpoint' def __init__(self): super(EndpointPolicyV3Controller, self).__init__() notifications.register_event_callback( 'deleted', 'endpoint', self._on_endpoint_delete)", "{'policy': ref} # NOTE(henry-nash): As in the catalog controller, we", "controller, we must ensure that the # legacy_endpoint_id does not", "you may # not use this file except in compliance", "self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def check_policy_association_for_region_and_service( self,", "to a policy.\"\"\" self.policy_api.get_policy(policy_id) refs = self.endpoint_policy_api.list_endpoints_for_policy(policy_id) return EndpointPolicyV3Controller.wrap_collection(context, refs)", "and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.check_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def", "an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.create_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def check_policy_association_for_endpoint(self,", "@controller.protected() def delete_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Delete an association between", "'deleted', 'service', self._on_service_delete) notifications.register_event_callback( 'deleted', 'region', self._on_region_delete) notifications.register_event_callback( 'deleted', 'policy',", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "self.catalog_api.get_service(service_id) self.catalog_api.get_region(region_id) self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def 
delete_policy_association_for_region_and_service( self,", "policy_id, service_id=service_id, region_id=region_id) @controller.protected() def delete_policy_association_for_region_and_service( self, context, policy_id, service_id,", "create_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Create an association between a policy", "resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_endpoint( payload['resource_info']) def _on_service_delete(self, service, resource_type, operation,", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "between a policy and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.create_policy_association( policy_id,", "entity # ourselves. self._add_self_referential_link(context, ref) return {'policy': ref} # NOTE(henry-nash):", "self._on_service_delete) notifications.register_event_callback( 'deleted', 'region', self._on_region_delete) notifications.register_event_callback( 'deleted', 'policy', self._on_policy_delete) def", "self.catalog_api.get_region(region_id) self.endpoint_policy_api.create_policy_association( policy_id, service_id=service_id, region_id=region_id) @controller.protected() def check_policy_association_for_region_and_service( self, context,", "def create_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Create an association between a", "endpoint_id=endpoint_id) @controller.protected() def check_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Check an association", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id) @controller.protected() def delete_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Delete", "self.catalog_api.get_endpoint(endpoint_id) ref = 
self.endpoint_policy_api.get_policy_for_endpoint(endpoint_id) # NOTE(henry-nash): since the collection and", "\"\"\"Delete an association between a policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id)", "self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.delete_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def create_policy_association_for_service(self, context, policy_id, service_id):", "and limitations # under the License. from keystone.common import controller", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "association between a policy and an endpoint.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.check_policy_association(", "payload): self.endpoint_policy_api.delete_association_by_policy( payload['resource_info']) @controller.protected() def create_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Create", "# NOTE(henry-nash): since the collection and member for this class", "policy_id, service_id=service_id) @controller.protected() def check_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Check an", "for the specific language governing permissions and limitations # under", "in the catalog controller, we must ensure that the #", "'deleted', 'policy', self._on_policy_delete) def _on_endpoint_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_endpoint(", "context, policy_id, service_id, region_id): \"\"\"Delete an association between a policy", "except in compliance with the License. You may obtain #", "@controller.protected() def check_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Check an association between", "<reponame>hashnfv/hashnfv-moon<gh_stars>0 # Copyright 2014 IBM Corp. 
# # Licensed under", "from keystone import notifications @dependency.requires('policy_api', 'catalog_api', 'endpoint_policy_api') class EndpointPolicyV3Controller(controller.V3Controller): collection_name", "member_name = 'endpoint' def __init__(self): super(EndpointPolicyV3Controller, self).__init__() notifications.register_event_callback( 'deleted', 'endpoint',", "endpoint_id=endpoint_id) @controller.protected() def create_policy_association_for_service(self, context, policy_id, service_id): \"\"\"Create an association", "License. You may obtain # a copy of the License", "service_id=service_id, region_id=region_id) @controller.protected() def check_policy_association_for_region_and_service( self, context, policy_id, service_id, region_id):", "ANY KIND, either express or implied. See the # License", "# distributed under the License is distributed on an \"AS", "payload): self.endpoint_policy_api.delete_association_by_endpoint( payload['resource_info']) def _on_service_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_service(", "policy for an endpoint.\"\"\" self.catalog_api.get_endpoint(endpoint_id) ref = self.endpoint_policy_api.get_policy_for_endpoint(endpoint_id) # NOTE(henry-nash):", "# Unless required by applicable law or agreed to in", "\"\"\"Check an association between a policy and a service.\"\"\" self.policy_api.get_policy(policy_id)", "service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.delete_policy_association( policy_id, service_id=service_id) @controller.protected() def create_policy_association_for_region_and_service( self,", "context, policy_id, service_id, region_id): \"\"\"Create an association between a policy", "is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES", "self.endpoint_policy_api.delete_association_by_service( payload['resource_info']) def _on_region_delete(self, service, resource_type, operation, 
payload): self.endpoint_policy_api.delete_association_by_region( payload['resource_info'])", "class EndpointPolicyV3Controller(controller.V3Controller): collection_name = 'endpoints' member_name = 'endpoint' def __init__(self):", "self.policy_api.get_policy(policy_id) self.catalog_api.get_endpoint(endpoint_id) self.endpoint_policy_api.delete_policy_association( policy_id, endpoint_id=endpoint_id) @controller.protected() def create_policy_association_for_service(self, context, policy_id,", "As in the catalog controller, we must ensure that the", "EndpointPolicyV3Controller(controller.V3Controller): collection_name = 'endpoints' member_name = 'endpoint' def __init__(self): super(EndpointPolicyV3Controller,", "_on_endpoint_delete(self, service, resource_type, operation, payload): self.endpoint_policy_api.delete_association_by_endpoint( payload['resource_info']) def _on_service_delete(self, service,", "the collection and member for this class is # set", "policy and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.check_policy_association( policy_id, service_id=service_id) @controller.protected()", "association between a policy and a service.\"\"\" self.policy_api.get_policy(policy_id) self.catalog_api.get_service(service_id) self.endpoint_policy_api.delete_policy_association(", "association to a policy.\"\"\" self.policy_api.get_policy(policy_id) refs = self.endpoint_policy_api.list_endpoints_for_policy(policy_id) return EndpointPolicyV3Controller.wrap_collection(context,", "policy_id, endpoint_id=endpoint_id) @controller.protected() def delete_policy_association_for_endpoint(self, context, policy_id, endpoint_id): \"\"\"Delete an", "policy_id, service_id): \"\"\"Delete an association between a policy and a", "or implied. See the # License for the specific language", "in ref: ref.pop('legacy_endpoint_id') return ref @classmethod def wrap_member(cls, context, ref):" ]
[ "plugin config with specific CLI options if nthreads is None", "parser def main(): from ..workflows.base import init_nibetaseries_participant_wf # get commandline", "= loadyml(f) plugin_settings.setdefault('plugin_args', {}) else: # Defaults plugin_settings = {", "be performed ' 'Multiple participant level analyses can be run", "When you import __main__ it will get executed again (as", "get_parser().parse_args() # check inputs if (opts.hrf_model == 'fir') and (opts.fir_delays", "analyses can be run independently ' '(in parallel) using the", "pass filter (Hz)') proc_opts.add_argument('-c', '--confounds', help='The confound column names '", "is None): raise ValueError('If the FIR HRF model is selected,", "a space separated list.') image_opts.add_argument('--session-label', action='store', default=None, help='select a session", "run independently ' '(in parallel) using the same output_dir') parser.add_argument('-v',", "prepopulated with the results of the' 'participant level analysis.') parser.add_argument('analysis_level',", "misc.add_argument('--graph', action='store_true', default=False, help='generates a graph png of the workflow')", "contains ' 'minimally preprocessed img, brainmask, and confounds.tsv') parser.add_argument('output_dir', help='The", "\"region\" ' 'is labeled with the same integer and there", "default='glover', choices=['glover', 'spm', 'fir', 'glover + derivative', 'glover + derivative", "will get executed twice: - When you run `python -m", "included in nuisance regression. ' 'write the confounds you wish", "the workflow') return parser def main(): from ..workflows.base import init_nibetaseries_participant_wf", "raise RuntimeError(\"NiBetaSeries/cli/run.py should not be run directly;\\n\" \"Please `pip install`", "'and files should be stored. 
If you are running group", "misc = parser.add_argument_group('misc options') misc.add_argument('--graph', action='store_true', default=False, help='generates a graph", "in volumes', metavar='VOL') proc_opts.add_argument('-w', '--work-dir', help='directory where temporary files '", "nibetaseries_participant_wf.write_graph(graph2use='colored', format='svg', simple_form=True) try: nibetaseries_participant_wf.run(**plugin_settings) except RuntimeError as e: if", "base plugin_settings from file if --use-plugin if opts.use_plugin is not", "NotImplementedError('group analysis not currently implemented') def init(): if __name__ ==", "be used') image_opts.add_argument('--description-label', action='store', default=None, help='select a bold file with", "image_opts.add_argument('--description-label', action='store', default=None, help='select a bold file with particular '", "if opts.analysis_level == \"participant\": nibetaseries_participant_wf = init_nibetaseries_participant_wf( estimator=opts.estimator, atlas_img=os.path.abspath(opts.atlas_img), atlas_lut=os.path.abspath(opts.atlas_lut),", "import __main__ it will get executed again (as a module)", "to import things from __main__ later, but that will cause", "following hemodynamic response functions') proc_opts.add_argument('--fir-delays', default=None, nargs='+', type=int, help='FIR delays", "running group level analysis ' 'this folder should be prepopulated", "= { 'plugin': 'MultiProc', 'plugin_args': { 'raise_insufficient': False, 'maxtasksperchild': 1,", "} } # Resource management options # Note that we're", "did not execute cleanly\" in str(e): print(\"Workflow did not execute", "you are running group level analysis ' 'this folder should", "elif opts.analysis_level == \"group\": raise NotImplementedError('group analysis not currently implemented')", "selected, ' 'FIR delays must be provided.') # Set up", "\"\"\" from __future__ import absolute_import import os import argparse from", "level analysis.') 
parser.add_argument('analysis_level', choices=['participant', 'group'], help='Level of the analysis that", "derivative + dispersion'], help='convolve your regressors ' 'with one of", "problems: the code will get executed twice: - When you", "up some sort of versioning system bids_dir = os.path.abspath(opts.bids_dir) derivatives_pipeline_dir", "integer and there is a unique ' 'integer associated with", "implemented') def init(): if __name__ == \"__main__\": raise RuntimeError(\"NiBetaSeries/cli/run.py should", "or '--atlas-img' in sys.argv), help='atlas look up table (tsv) formatted", "from glob import glob from multiprocessing import cpu_count from nipype", "associated with each region of interest.') atlas_args.add_argument('-l', '--atlas-lut', action='store', required=('-a'", "'--confounds', help='The confound column names ' 'that are to be", "else: # Defaults plugin_settings = { 'plugin': 'MultiProc', 'plugin_args': {", "= parser.add_argument_group('Options to handle performance') g_perfm.add_argument('--nthreads', '-n-cpus', action='store', type=int, help='maximum", "in sys.argv or '--atlas-img' in sys.argv), help='atlas look up table", "opts.analysis_level == \"participant\": nibetaseries_participant_wf = init_nibetaseries_participant_wf( estimator=opts.estimator, atlas_img=os.path.abspath(opts.atlas_img), atlas_lut=os.path.abspath(opts.atlas_lut), bids_dir=bids_dir,", "``nibetaseries.__main__`` in ``sys.modules``. 
- When you import __main__ it will", "parser object\"\"\" from ..__init__ import __version__ import sys verstr =", "'crashfile_format': 'txt', 'parameterize_dirs': False}, }) # running participant level if", "the results of the' 'participant level analysis.') parser.add_argument('analysis_level', choices=['participant', 'group'],", "' 'certain nibs finished as expected.') # Image Selection options", "image_opts = parser.add_argument_group('Options for selecting images') parser.add_argument('--participant-label', nargs=\"+\", help='The label(s)", "log_dir, 'log_to_file': True}, 'execution': {'crashdump_dir': log_dir, 'crashfile_format': 'txt', 'parameterize_dirs': False},", "dataset ' 'formatted according to the BIDS standard.') parser.add_argument('derivatives_pipeline', help='The", "'--atlas-lut' in sys.argv), help='input atlas nifti where each voxel within", "'provided all subjects should be analyzed. Multiple ' 'participants can", "can be specified with a space separated list.') image_opts.add_argument('--session-label', action='store',", "as expected.') # Image Selection options image_opts = parser.add_argument_group('Options for", "-*- coding: utf-8 -*- \"\"\" Module that contains the command", "parser.add_argument('bids_dir', help='The directory with the input dataset ' 'formatted according", "interest.') atlas_args.add_argument('-l', '--atlas-lut', action='store', required=('-a' in sys.argv or '--atlas-img' in", "opts.participant_label # for all subjects else: subject_dirs = glob(os.path.join(bids_dir, \"sub-*\"))", "options opts = get_parser().parse_args() # check inputs if (opts.hrf_model ==", "space to be used') image_opts.add_argument('--description-label', action='store', default=None, help='select a bold", "won't be any ``nibetaseries.__main__`` in ``sys.modules``. 
- When you import", "(mm)') proc_opts.add_argument('-hp', '--high-pass', action='store', type=float, default=0.0078125, help='high pass filter (Hz)')", "exist, and why not put this in __main__? You might", "\"\"\"Build parser object\"\"\" from ..__init__ import __version__ import sys verstr", "# Load base plugin_settings from file if --use-plugin if opts.use_plugin", "= os.path.join(os.getcwd(), 'nibetaseries_work') os.makedirs(work_dir, exist_ok=True) # only for a subset", "directly;\\n\" \"Please `pip install` NiBetaSeries and use the `nibs` command\")", "not ' 'provided all subjects should be analyzed. Multiple '", "default=None, nargs='+', type=int, help='FIR delays in volumes', metavar='VOL') proc_opts.add_argument('-w', '--work-dir',", "import config as ncfg def get_parser(): \"\"\"Build parser object\"\"\" from", "action='store', default=None, help='select a run to analyze') image_opts.add_argument('-sp', '--space-label', action='store',", "Arguments') atlas_args.add_argument('-a', '--atlas-img', action='store', required=('-l' in sys.argv or '--atlas-lut' in", "import init_nibetaseries_participant_wf # get commandline options opts = get_parser().parse_args() #", "bids_dir=bids_dir, derivatives_pipeline_dir=derivatives_pipeline_dir, exclude_description_label=opts.exclude_description_label, fir_delays=opts.fir_delays, hrf_model=opts.hrf_model, high_pass=opts.high_pass, output_dir=output_dir, run_label=opts.run_label, selected_confounds=opts.confounds, session_label=opts.session_label,", "nuisance regression. ' 'write the confounds you wish to include", "files). 
' 'This directory can be deleted once you are", "plugin args # This may need to be revisited if", "that will cause problems: the code will get executed twice:", "running participant level if opts.analysis_level == \"participant\": nibetaseries_participant_wf = init_nibetaseries_participant_wf(", "else: subject_dirs = glob(os.path.join(bids_dir, \"sub-*\")) subject_list = [subject_dir.split(\"-\")[-1] for subject_dir", "the input dataset ' 'formatted according to the BIDS standard.')", "but that will cause problems: the code will get executed", "default=False, help='generates a graph png of the workflow') return parser", "import load as loadyml with open(opts.use_plugin) as f: plugin_settings =", "for processing') proc_opts.add_argument('--estimator', default='lss', choices=['lss', 'lsa'], help='beta series modeling method')", "If you are running group level analysis ' 'this folder", "init_nibetaseries_participant_wf( estimator=opts.estimator, atlas_img=os.path.abspath(opts.atlas_img), atlas_lut=os.path.abspath(opts.atlas_lut), bids_dir=bids_dir, derivatives_pipeline_dir=derivatives_pipeline_dir, exclude_description_label=opts.exclude_description_label, fir_delays=opts.fir_delays, hrf_model=opts.hrf_model, high_pass=opts.high_pass,", "opts.derivatives_pipeline) output_dir = os.path.abspath(opts.output_dir) os.makedirs(output_dir, exist_ok=True) log_dir = os.path.join(output_dir, 'logs')", "# Defaults plugin_settings = { 'plugin': 'MultiProc', 'plugin_args': { 'raise_insufficient':", "from argparse import RawTextHelpFormatter from glob import glob from multiprocessing", "be stored. 
If you are running group level analysis '", "provided.') # Set up directories # TODO: set up some", "columns: ' 'index, regions which correspond to the regions in", "derivative + dispersion', 'spm + derivative', 'spm + derivative +", "executed twice: - When you run `python -m nibetaseries` python", "default=None, help='select a bold file with particular ' '`desc` label", "system bids_dir = os.path.abspath(opts.bids_dir) derivatives_pipeline_dir = os.path.join(bids_dir, 'derivatives', opts.derivatives_pipeline) output_dir", "opts.work_dir: work_dir = os.path.abspath(opts.work_dir) else: work_dir = os.path.join(os.getcwd(), 'nibetaseries_work') os.makedirs(work_dir,", "any ``nibetaseries.__main__`` in ``sys.modules``. - When you import __main__ it", "files ' 'are stored (i.e. non-essential files). ' 'This directory", "' 'that should be analyzed. The label ' 'corresponds to", "default='MNI152NLin2009cAsym', choices=['MNI152NLin2009cAsym'], help='select a bold derivative in a specific space", "analysis not currently implemented') def init(): if __name__ == \"__main__\":", "(opts.hrf_model == 'fir') and (opts.fir_delays is None): raise ValueError('If the", "{}) else: # Defaults plugin_settings = { 'plugin': 'MultiProc', 'plugin_args':", "v{}'.format(__version__) parser = argparse.ArgumentParser(description='NiBetaSeries BIDS arguments', formatter_class=RawTextHelpFormatter) parser.add_argument('bids_dir', help='The directory", "where temporary files ' 'are stored (i.e. non-essential files). '", "if opts.graph: nibetaseries_participant_wf.write_graph(graph2use='colored', format='svg', simple_form=True) try: nibetaseries_participant_wf.run(**plugin_settings) except RuntimeError as", "spec ' '(so it does not include \"sub-\"). 
If this", "..workflows.base import init_nibetaseries_participant_wf # get commandline options opts = get_parser().parse_args()", "'spm', 'fir', 'glover + derivative', 'glover + derivative + dispersion',", "label ' 'corresponds to sub-<participant_label> from the BIDS spec '", "from __main__ later, but that will cause problems: the code", "response functions') proc_opts.add_argument('--fir-delays', default=None, nargs='+', type=int, help='FIR delays in volumes',", "parser.add_argument('output_dir', help='The directory where the output directory ' 'and files", "directory where the output directory ' 'and files should be", "to be used') image_opts.add_argument('--description-label', action='store', default=None, help='select a bold file", "f: plugin_settings = loadyml(f) plugin_settings.setdefault('plugin_args', {}) else: # Defaults plugin_settings", "not currently implemented') def init(): if __name__ == \"__main__\": raise", "all subjects else: subject_dirs = glob(os.path.join(bids_dir, \"sub-*\")) subject_list = [subject_dir.split(\"-\")[-1]", "is a unique ' 'integer associated with each region of", ") if opts.graph: nibetaseries_participant_wf.write_graph(graph2use='colored', format='svg', simple_form=True) try: nibetaseries_participant_wf.run(**plugin_settings) except RuntimeError", "glob import glob from multiprocessing import cpu_count from nipype import", "label to process') image_opts.add_argument('--exclude-description-label', action='store_true', default=False, help='exclude this `desc` label", "help='The confound column names ' 'that are to be included", "Set up directories # TODO: set up some sort of", "there's no ``nibetaseries.__main__`` in ``sys.modules``. 
Also see (1) from http://click.pocoo.org/5/setuptools/#setuptools-integration", "sys.argv or '--atlas-lut' in sys.argv), help='input atlas nifti where each", "correspond to the regions in the ' 'nifti file specified", "nthreads is None or opts.nthreads is not None: nthreads =", "'spm + derivative', 'spm + derivative + dispersion'], help='convolve your", "os.path.abspath(opts.work_dir) else: work_dir = os.path.join(os.getcwd(), 'nibetaseries_work') os.makedirs(work_dir, exist_ok=True) # only", "nthreads = plugin_settings['plugin_args'].get('n_procs') # Permit overriding plugin config with specific", "proc_opts.add_argument('-sm', '--smoothing-kernel', action='store', type=float, default=6.0, help='select a smoothing kernel (mm)')", "with the columns: ' 'index, regions which correspond to the", "of threads across all processes') g_perfm.add_argument('--use-plugin', action='store', default=None, help='nipype plugin", "directory ' 'and files should be stored. If you are", "# Set up directories # TODO: set up some sort", "'spm + derivative + dispersion'], help='convolve your regressors ' 'with", "task to be processed') image_opts.add_argument('--run-label', action='store', default=None, help='select a run", "are to be included in nuisance regression. 
' 'write the", "run_label=opts.run_label, selected_confounds=opts.confounds, session_label=opts.session_label, smoothing_kernel=opts.smoothing_kernel, space_label=opts.space_label, subject_list=subject_list, task_label=opts.task_label, description_label=opts.description_label, work_dir=work_dir, )", "+ dispersion'], help='convolve your regressors ' 'with one of the", "os.makedirs(output_dir, exist_ok=True) log_dir = os.path.join(output_dir, 'logs') os.makedirs(log_dir, exist_ok=True) if opts.work_dir:", "opts.nthreads if nthreads is None or nthreads < 1: nthreads", "image_opts.add_argument('--exclude-description-label', action='store_true', default=False, help='exclude this `desc` label from nibetaseries') #", "workflow') return parser def main(): from ..workflows.base import init_nibetaseries_participant_wf #", "= parser.add_argument_group('Options for processing') proc_opts.add_argument('--estimator', default='lss', choices=['lss', 'lsa'], help='beta series", "action='store', default=None, help='select a bold file with particular ' '`desc`", "import RawTextHelpFormatter from glob import glob from multiprocessing import cpu_count", "parser.add_argument_group('Options for processing') proc_opts.add_argument('--estimator', default='lss', choices=['lss', 'lsa'], help='beta series modeling", "} # Resource management options # Note that we're making", "else: raise e elif opts.analysis_level == \"group\": raise NotImplementedError('group analysis", "the confounds you wish to include separated by a space',", "are running group level analysis ' 'this folder should be", "parser.add_argument('-v', '--version', action='version', version=verstr) # Atlas Arguments (Required Options) atlas_args", "the regions in the ' 'nifti file specified by --atlas-img.')", "may need to be revisited if people try to use", "with the same integer and there is a unique '", "be prepopulated with the results of the' 'participant level analysis.')", "parser.add_argument_group('Required Atlas 
Arguments') atlas_args.add_argument('-a', '--atlas-img', action='store', required=('-l' in sys.argv or", "the command line app. Why does this file exist, and", "of the participant(s) ' 'that should be analyzed. The label", "plugin_settings['plugin_args']['n_procs'] = nthreads # Nipype config (logs and execution) ncfg.update_config({", "opts.analysis_level == \"group\": raise NotImplementedError('group analysis not currently implemented') def", "multiprocessing import cpu_count from nipype import config as ncfg def", "need to be revisited if people try to use batch", "participant level if opts.analysis_level == \"participant\": nibetaseries_participant_wf = init_nibetaseries_participant_wf( estimator=opts.estimator,", "where the output directory ' 'and files should be stored.", "ncfg def get_parser(): \"\"\"Build parser object\"\"\" from ..__init__ import __version__", "subject_dir in subject_dirs] # Nipype plugin configuration # Load base", "did not execute cleanly\") else: raise e elif opts.analysis_level ==", "``sys.modules``. Also see (1) from http://click.pocoo.org/5/setuptools/#setuptools-integration \"\"\" from __future__ import", "import os import argparse from argparse import RawTextHelpFormatter from glob", "config as ncfg def get_parser(): \"\"\"Build parser object\"\"\" from ..__init__", "__version__ import sys verstr = 'nibs v{}'.format(__version__) parser = argparse.ArgumentParser(description='NiBetaSeries", "' 'index, regions which correspond to the regions in the", "a specific task to be processed') image_opts.add_argument('--run-label', action='store', default=None, help='select", "choices=['MNI152NLin2009cAsym'], help='select a bold derivative in a specific space to", "'that should be analyzed. 
The label ' 'corresponds to sub-<participant_label>", "str(e): print(\"Workflow did not execute cleanly\") else: raise e elif", "to use batch plugins nthreads = plugin_settings['plugin_args'].get('n_procs') # Permit overriding", "nthreads is None or nthreads < 1: nthreads = cpu_count()", "\"participant\": nibetaseries_participant_wf = init_nibetaseries_participant_wf( estimator=opts.estimator, atlas_img=os.path.abspath(opts.atlas_img), atlas_lut=os.path.abspath(opts.atlas_lut), bids_dir=bids_dir, derivatives_pipeline_dir=derivatives_pipeline_dir, exclude_description_label=opts.exclude_description_label,", "should be analyzed. The label ' 'corresponds to sub-<participant_label> from", "__main__? You might be tempted to import things from __main__", "hrf_model=opts.hrf_model, high_pass=opts.high_pass, output_dir=output_dir, run_label=opts.run_label, selected_confounds=opts.confounds, session_label=opts.session_label, smoothing_kernel=opts.smoothing_kernel, space_label=opts.space_label, subject_list=subject_list, task_label=opts.task_label,", "nthreads # Nipype config (logs and execution) ncfg.update_config({ 'logging': {'log_directory':", "object\"\"\" from ..__init__ import __version__ import sys verstr = 'nibs", "a smoothing kernel (mm)') proc_opts.add_argument('-hp', '--high-pass', action='store', type=float, default=0.0078125, help='high", "'derivatives', opts.derivatives_pipeline) output_dir = os.path.abspath(opts.output_dir) os.makedirs(output_dir, exist_ok=True) log_dir = os.path.join(output_dir,", "img, brainmask, and confounds.tsv') parser.add_argument('output_dir', help='The directory where the output", "# Permit overriding plugin config with specific CLI options if", "absolute_import import os import argparse from argparse import RawTextHelpFormatter from", "by a space', nargs=\"+\") proc_opts.add_argument('--hrf-model', default='glover', choices=['glover', 'spm', 'fir', 'glover", "as a script. 
That means there won't be any ``nibetaseries.__main__``", "if --use-plugin if opts.use_plugin is not None: from yaml import", "http://click.pocoo.org/5/setuptools/#setuptools-integration \"\"\" from __future__ import absolute_import import os import argparse", "'formatted according to the BIDS standard.') parser.add_argument('derivatives_pipeline', help='The pipeline that", "os.makedirs(work_dir, exist_ok=True) # only for a subset of subjects if", "type=int, help='FIR delays in volumes', metavar='VOL') proc_opts.add_argument('-w', '--work-dir', help='directory where", "'`desc` label to process') image_opts.add_argument('--exclude-description-label', action='store_true', default=False, help='exclude this `desc`", "RuntimeError as e: if \"Workflow did not execute cleanly\" in", "it will get executed again (as a module) because there's", "output_dir = os.path.abspath(opts.output_dir) os.makedirs(output_dir, exist_ok=True) log_dir = os.path.join(output_dir, 'logs') os.makedirs(log_dir,", "= cpu_count() plugin_settings['plugin_args']['n_procs'] = nthreads # Nipype config (logs and", "'participant level analysis.') parser.add_argument('analysis_level', choices=['participant', 'group'], help='Level of the analysis", "in ``sys.modules``. 
- When you import __main__ it will get", "sys.argv), help='atlas look up table (tsv) formatted with the columns:", "== 'fir') and (opts.fir_delays is None): raise ValueError('If the FIR", "'execution': {'crashdump_dir': log_dir, 'crashfile_format': 'txt', 'parameterize_dirs': False}, }) # running", "action='store', required=('-l' in sys.argv or '--atlas-lut' in sys.argv), help='input atlas", "in sys.argv), help='input atlas nifti where each voxel within a", "# get commandline options opts = get_parser().parse_args() # check inputs", "type=float, default=6.0, help='select a smoothing kernel (mm)') proc_opts.add_argument('-hp', '--high-pass', action='store',", "Resource management options # Note that we're making strong assumptions", "help='atlas look up table (tsv) formatted with the columns: '", "this `desc` label from nibetaseries') # performance options g_perfm =", "action='store', default=None, help='select a specific task to be processed') image_opts.add_argument('--run-label',", "script. That means there won't be any ``nibetaseries.__main__`` in ``sys.modules``.", "the' 'participant level analysis.') parser.add_argument('analysis_level', choices=['participant', 'group'], help='Level of the", "number of threads across all processes') g_perfm.add_argument('--use-plugin', action='store', default=None, help='nipype", "if opts.participant_label: subject_list = opts.participant_label # for all subjects else:", "Load base plugin_settings from file if --use-plugin if opts.use_plugin is", "# This may need to be revisited if people try", "= plugin_settings['plugin_args'].get('n_procs') # Permit overriding plugin config with specific CLI", "Also see (1) from http://click.pocoo.org/5/setuptools/#setuptools-integration \"\"\" from __future__ import absolute_import", "module) because there's no ``nibetaseries.__main__`` in ``sys.modules``. 
Also see (1)", "\"sub-*\")) subject_list = [subject_dir.split(\"-\")[-1] for subject_dir in subject_dirs] # Nipype", "must be provided.') # Set up directories # TODO: set", "= 'nibs v{}'.format(__version__) parser = argparse.ArgumentParser(description='NiBetaSeries BIDS arguments', formatter_class=RawTextHelpFormatter) parser.add_argument('bids_dir',", "= argparse.ArgumentParser(description='NiBetaSeries BIDS arguments', formatter_class=RawTextHelpFormatter) parser.add_argument('bids_dir', help='The directory with the", "run directly;\\n\" \"Please `pip install` NiBetaSeries and use the `nibs`", "separated list.') image_opts.add_argument('--session-label', action='store', default=None, help='select a session to analyze')", "estimator=opts.estimator, atlas_img=os.path.abspath(opts.atlas_img), atlas_lut=os.path.abspath(opts.atlas_lut), bids_dir=bids_dir, derivatives_pipeline_dir=derivatives_pipeline_dir, exclude_description_label=opts.exclude_description_label, fir_delays=opts.fir_delays, hrf_model=opts.hrf_model, high_pass=opts.high_pass, output_dir=output_dir,", "import absolute_import import os import argparse from argparse import RawTextHelpFormatter", "# Atlas Arguments (Required Options) atlas_args = parser.add_argument_group('Required Atlas Arguments')", "filter (Hz)') proc_opts.add_argument('-c', '--confounds', help='The confound column names ' 'that", "' 'nifti file specified by --atlas-img.') # preprocessing options proc_opts", "a bold file with particular ' '`desc` label to process')", "default=False, help='exclude this `desc` label from nibetaseries') # performance options", "is None or nthreads < 1: nthreads = cpu_count() plugin_settings['plugin_args']['n_procs']", "= [subject_dir.split(\"-\")[-1] for subject_dir in subject_dirs] # Nipype plugin configuration", "raise e elif opts.analysis_level == \"group\": raise NotImplementedError('group analysis not", "use batch plugins nthreads = plugin_settings['plugin_args'].get('n_procs') # Permit overriding 
plugin", "__main__ later, but that will cause problems: the code will", "open(opts.use_plugin) as f: plugin_settings = loadyml(f) plugin_settings.setdefault('plugin_args', {}) else: #", "RawTextHelpFormatter from glob import glob from multiprocessing import cpu_count from", "inputs if (opts.hrf_model == 'fir') and (opts.fir_delays is None): raise", "help='select a run to analyze') image_opts.add_argument('-sp', '--space-label', action='store', default='MNI152NLin2009cAsym', choices=['MNI152NLin2009cAsym'],", "not None: from yaml import load as loadyml with open(opts.use_plugin)", "a \"region\" ' 'is labeled with the same integer and", "exist_ok=True) # only for a subset of subjects if opts.participant_label:", "ncfg.update_config({ 'logging': {'log_directory': log_dir, 'log_to_file': True}, 'execution': {'crashdump_dir': log_dir, 'crashfile_format':", "Nipype config (logs and execution) ncfg.update_config({ 'logging': {'log_directory': log_dir, 'log_to_file':", "not execute cleanly\") else: raise e elif opts.analysis_level == \"group\":", "help='Level of the analysis that will be performed ' 'Multiple", "file specified by --atlas-img.') # preprocessing options proc_opts = parser.add_argument_group('Options", "versioning system bids_dir = os.path.abspath(opts.bids_dir) derivatives_pipeline_dir = os.path.join(bids_dir, 'derivatives', opts.derivatives_pipeline)", "this parameter is not ' 'provided all subjects should be", "None or opts.nthreads is not None: nthreads = opts.nthreads if", "-*- \"\"\" Module that contains the command line app. 
Why", "'glover + derivative + dispersion', 'spm + derivative', 'spm +", "'MultiProc', 'plugin_args': { 'raise_insufficient': False, 'maxtasksperchild': 1, } } #", "not be run directly;\\n\" \"Please `pip install` NiBetaSeries and use", "sort of versioning system bids_dir = os.path.abspath(opts.bids_dir) derivatives_pipeline_dir = os.path.join(bids_dir,", "- When you run `python -m nibetaseries` python will execute", "default=None, help='select a specific task to be processed') image_opts.add_argument('--run-label', action='store',", "see (1) from http://click.pocoo.org/5/setuptools/#setuptools-integration \"\"\" from __future__ import absolute_import import", "unique ' 'integer associated with each region of interest.') atlas_args.add_argument('-l',", "Module that contains the command line app. Why does this", "# performance options g_perfm = parser.add_argument_group('Options to handle performance') g_perfm.add_argument('--nthreads',", "subjects if opts.participant_label: subject_list = opts.participant_label # for all subjects", "< 1: nthreads = cpu_count() plugin_settings['plugin_args']['n_procs'] = nthreads # Nipype", "'plugin_args': { 'raise_insufficient': False, 'maxtasksperchild': 1, } } # Resource", "action='store', type=float, default=0.0078125, help='high pass filter (Hz)') proc_opts.add_argument('-c', '--confounds', help='The", "cpu_count from nipype import config as ncfg def get_parser(): \"\"\"Build", "required=('-a' in sys.argv or '--atlas-img' in sys.argv), help='atlas look up", "in sys.argv), help='atlas look up table (tsv) formatted with the", "help='select a smoothing kernel (mm)') proc_opts.add_argument('-hp', '--high-pass', action='store', type=float, default=0.0078125,", "be provided.') # Set up directories # TODO: set up", "choices=['lss', 'lsa'], help='beta series modeling method') proc_opts.add_argument('-sm', '--smoothing-kernel', action='store', type=float,", "can be run independently ' '(in parallel) using the same", "default=0.0078125, 
help='high pass filter (Hz)') proc_opts.add_argument('-c', '--confounds', help='The confound column", "the analysis that will be performed ' 'Multiple participant level", "in a specific space to be used') image_opts.add_argument('--description-label', action='store', default=None,", "parser.add_argument('analysis_level', choices=['participant', 'group'], help='Level of the analysis that will be", "be revisited if people try to use batch plugins nthreads", "'with one of the following hemodynamic response functions') proc_opts.add_argument('--fir-delays', default=None,", "parser.add_argument_group('Options to handle performance') g_perfm.add_argument('--nthreads', '-n-cpus', action='store', type=int, help='maximum number", "a subset of subjects if opts.participant_label: subject_list = opts.participant_label #", "__future__ import absolute_import import os import argparse from argparse import", "' 'This directory can be deleted once you are reasonably", "reasonably ' 'certain nibs finished as expected.') # Image Selection", "management options # Note that we're making strong assumptions about", "regression. ' 'write the confounds you wish to include separated", "be deleted once you are reasonably ' 'certain nibs finished", "files should be stored. 
If you are running group level", "1: nthreads = cpu_count() plugin_settings['plugin_args']['n_procs'] = nthreads # Nipype config", "log_dir = os.path.join(output_dir, 'logs') os.makedirs(log_dir, exist_ok=True) if opts.work_dir: work_dir =", "will cause problems: the code will get executed twice: -", "be specified with a space separated list.') image_opts.add_argument('--session-label', action='store', default=None,", "Selection options image_opts = parser.add_argument_group('Options for selecting images') parser.add_argument('--participant-label', nargs=\"+\",", "nibetaseries') # performance options g_perfm = parser.add_argument_group('Options to handle performance')", "plugin configuration # Load base plugin_settings from file if --use-plugin", "'--high-pass', action='store', type=float, default=0.0078125, help='high pass filter (Hz)') proc_opts.add_argument('-c', '--confounds',", "'(in parallel) using the same output_dir') parser.add_argument('-v', '--version', action='version', version=verstr)", "action='store', default=None, help='select a session to analyze') image_opts.add_argument('-t', '--task-label', action='store',", "`python -m nibetaseries` python will execute ``__main__.py`` as a script.", "dispersion', 'spm + derivative', 'spm + derivative + dispersion'], help='convolve", "is None or opts.nthreads is not None: nthreads = opts.nthreads", "a space', nargs=\"+\") proc_opts.add_argument('--hrf-model', default='glover', choices=['glover', 'spm', 'fir', 'glover +", "folder should be prepopulated with the results of the' 'participant", "used') image_opts.add_argument('--description-label', action='store', default=None, help='select a bold file with particular", "that we're making strong assumptions about valid plugin args #", "import things from __main__ later, but that will cause problems:", "= parser.add_argument_group('Options for selecting images') parser.add_argument('--participant-label', nargs=\"+\", help='The label(s) of", "get executed again (as a 
module) because there's no ``nibetaseries.__main__``", "atlas_args.add_argument('-a', '--atlas-img', action='store', required=('-l' in sys.argv or '--atlas-lut' in sys.argv),", "and execution) ncfg.update_config({ 'logging': {'log_directory': log_dir, 'log_to_file': True}, 'execution': {'crashdump_dir':", "'logging': {'log_directory': log_dir, 'log_to_file': True}, 'execution': {'crashdump_dir': log_dir, 'crashfile_format': 'txt',", "things from __main__ later, but that will cause problems: the", "work_dir=work_dir, ) if opts.graph: nibetaseries_participant_wf.write_graph(graph2use='colored', format='svg', simple_form=True) try: nibetaseries_participant_wf.run(**plugin_settings) except", "init(): if __name__ == \"__main__\": raise RuntimeError(\"NiBetaSeries/cli/run.py should not be", "'minimally preprocessed img, brainmask, and confounds.tsv') parser.add_argument('output_dir', help='The directory where", "'--atlas-img' in sys.argv), help='atlas look up table (tsv) formatted with", "will execute ``__main__.py`` as a script. 
That means there won't", "as ncfg def get_parser(): \"\"\"Build parser object\"\"\" from ..__init__ import", "independently ' '(in parallel) using the same output_dir') parser.add_argument('-v', '--version',", "default=None, help='select a session to analyze') image_opts.add_argument('-t', '--task-label', action='store', default=None,", "in subject_dirs] # Nipype plugin configuration # Load base plugin_settings", "wish to include separated by a space', nargs=\"+\") proc_opts.add_argument('--hrf-model', default='glover',", "+ derivative + dispersion', 'spm + derivative', 'spm + derivative", "of the workflow') return parser def main(): from ..workflows.base import", "execution) ncfg.update_config({ 'logging': {'log_directory': log_dir, 'log_to_file': True}, 'execution': {'crashdump_dir': log_dir,", "up directories # TODO: set up some sort of versioning", "atlas_lut=os.path.abspath(opts.atlas_lut), bids_dir=bids_dir, derivatives_pipeline_dir=derivatives_pipeline_dir, exclude_description_label=opts.exclude_description_label, fir_delays=opts.fir_delays, hrf_model=opts.hrf_model, high_pass=opts.high_pass, output_dir=output_dir, run_label=opts.run_label, selected_confounds=opts.confounds,", "options if nthreads is None or opts.nthreads is not None:", "of versioning system bids_dir = os.path.abspath(opts.bids_dir) derivatives_pipeline_dir = os.path.join(bids_dir, 'derivatives',", "choices=['glover', 'spm', 'fir', 'glover + derivative', 'glover + derivative +", "currently implemented') def init(): if __name__ == \"__main__\": raise RuntimeError(\"NiBetaSeries/cli/run.py", "--atlas-img.') # preprocessing options proc_opts = parser.add_argument_group('Options for processing') proc_opts.add_argument('--estimator',", "run to analyze') image_opts.add_argument('-sp', '--space-label', action='store', default='MNI152NLin2009cAsym', choices=['MNI152NLin2009cAsym'], help='select a", "default=6.0, help='select a smoothing kernel (mm)') proc_opts.add_argument('-hp', '--high-pass', 
action='store', type=float,", "# only for a subset of subjects if opts.participant_label: subject_list", "help='nipype plugin configuration file') # misc options misc = parser.add_argument_group('misc", "that contains the command line app. Why does this file", "== \"group\": raise NotImplementedError('group analysis not currently implemented') def init():", "formatter_class=RawTextHelpFormatter) parser.add_argument('bids_dir', help='The directory with the input dataset ' 'formatted", "exist_ok=True) log_dir = os.path.join(output_dir, 'logs') os.makedirs(log_dir, exist_ok=True) if opts.work_dir: work_dir", "}) # running participant level if opts.analysis_level == \"participant\": nibetaseries_participant_wf", "action='version', version=verstr) # Atlas Arguments (Required Options) atlas_args = parser.add_argument_group('Required", "work_dir = os.path.abspath(opts.work_dir) else: work_dir = os.path.join(os.getcwd(), 'nibetaseries_work') os.makedirs(work_dir, exist_ok=True)", "= nthreads # Nipype config (logs and execution) ncfg.update_config({ 'logging':", "file') # misc options misc = parser.add_argument_group('misc options') misc.add_argument('--graph', action='store_true',", "'are stored (i.e. non-essential files). ' 'This directory can be", "'fir') and (opts.fir_delays is None): raise ValueError('If the FIR HRF", "app. Why does this file exist, and why not put", "subject_dirs] # Nipype plugin configuration # Load base plugin_settings from", "# running participant level if opts.analysis_level == \"participant\": nibetaseries_participant_wf =", "be any ``nibetaseries.__main__`` in ``sys.modules``. - When you import __main__", "Options) atlas_args = parser.add_argument_group('Required Atlas Arguments') atlas_args.add_argument('-a', '--atlas-img', action='store', required=('-l'", "python will execute ``__main__.py`` as a script. 
That means there", "1, } } # Resource management options # Note that", "strong assumptions about valid plugin args # This may need", "print(\"Workflow did not execute cleanly\") else: raise e elif opts.analysis_level", "+ derivative', 'spm + derivative + dispersion'], help='convolve your regressors", "file exist, and why not put this in __main__? You", "about valid plugin args # This may need to be", "within a \"region\" ' 'is labeled with the same integer", "regions which correspond to the regions in the ' 'nifti", "action='store_true', default=False, help='generates a graph png of the workflow') return", "# -*- coding: utf-8 -*- \"\"\" Module that contains the", "in nuisance regression. ' 'write the confounds you wish to", "argparse import RawTextHelpFormatter from glob import glob from multiprocessing import", "and there is a unique ' 'integer associated with each", "sys.argv), help='input atlas nifti where each voxel within a \"region\"", "'(so it does not include \"sub-\"). If this parameter is", "this in __main__? 
You might be tempted to import things", "method') proc_opts.add_argument('-sm', '--smoothing-kernel', action='store', type=float, default=6.0, help='select a smoothing kernel", "plugin_settings.setdefault('plugin_args', {}) else: # Defaults plugin_settings = { 'plugin': 'MultiProc',", "args # This may need to be revisited if people", "if nthreads is None or nthreads < 1: nthreads =", "options proc_opts = parser.add_argument_group('Options for processing') proc_opts.add_argument('--estimator', default='lss', choices=['lss', 'lsa'],", "you run `python -m nibetaseries` python will execute ``__main__.py`` as", "= os.path.abspath(opts.bids_dir) derivatives_pipeline_dir = os.path.join(bids_dir, 'derivatives', opts.derivatives_pipeline) output_dir = os.path.abspath(opts.output_dir)", "options') misc.add_argument('--graph', action='store_true', default=False, help='generates a graph png of the", "brainmask, and confounds.tsv') parser.add_argument('output_dir', help='The directory where the output directory", "performed ' 'Multiple participant level analyses can be run independently", "nthreads = opts.nthreads if nthreads is None or nthreads <", "Atlas Arguments') atlas_args.add_argument('-a', '--atlas-img', action='store', required=('-l' in sys.argv or '--atlas-lut'", "label from nibetaseries') # performance options g_perfm = parser.add_argument_group('Options to", "specific CLI options if nthreads is None or opts.nthreads is", "' 'corresponds to sub-<participant_label> from the BIDS spec ' '(so", "= os.path.abspath(opts.work_dir) else: work_dir = os.path.join(os.getcwd(), 'nibetaseries_work') os.makedirs(work_dir, exist_ok=True) #", "space', nargs=\"+\") proc_opts.add_argument('--hrf-model', default='glover', choices=['glover', 'spm', 'fir', 'glover + derivative',", "parameter is not ' 'provided all subjects should be analyzed.", "{ 'plugin': 'MultiProc', 'plugin_args': { 'raise_insufficient': False, 'maxtasksperchild': 1, }", "not None: nthreads = opts.nthreads if 
nthreads is None or", "or '--atlas-lut' in sys.argv), help='input atlas nifti where each voxel", "a script. That means there won't be any ``nibetaseries.__main__`` in", "action='store', type=float, default=6.0, help='select a smoothing kernel (mm)') proc_opts.add_argument('-hp', '--high-pass',", "all subjects should be analyzed. Multiple ' 'participants can be", "of interest.') atlas_args.add_argument('-l', '--atlas-lut', action='store', required=('-a' in sys.argv or '--atlas-img'", "for subject_dir in subject_dirs] # Nipype plugin configuration # Load", "why not put this in __main__? You might be tempted", "analysis that will be performed ' 'Multiple participant level analyses", "' '(so it does not include \"sub-\"). If this parameter", "stored (i.e. non-essential files). ' 'This directory can be deleted", "type=int, help='maximum number of threads across all processes') g_perfm.add_argument('--use-plugin', action='store',", "os import argparse from argparse import RawTextHelpFormatter from glob import", "options # Note that we're making strong assumptions about valid", "a bold derivative in a specific space to be used')", "analyzed. 
Multiple ' 'participants can be specified with a space", "of subjects if opts.participant_label: subject_list = opts.participant_label # for all", "'--space-label', action='store', default='MNI152NLin2009cAsym', choices=['MNI152NLin2009cAsym'], help='select a bold derivative in a", "people try to use batch plugins nthreads = plugin_settings['plugin_args'].get('n_procs') #", "according to the BIDS standard.') parser.add_argument('derivatives_pipeline', help='The pipeline that contains", "import __version__ import sys verstr = 'nibs v{}'.format(__version__) parser =", "FIR HRF model is selected, ' 'FIR delays must be", "model is selected, ' 'FIR delays must be provided.') #", "derivatives_pipeline_dir = os.path.join(bids_dir, 'derivatives', opts.derivatives_pipeline) output_dir = os.path.abspath(opts.output_dir) os.makedirs(output_dir, exist_ok=True)", "' 'write the confounds you wish to include separated by", "subset of subjects if opts.participant_label: subject_list = opts.participant_label # for", "action='store_true', default=False, help='exclude this `desc` label from nibetaseries') # performance", "processing') proc_opts.add_argument('--estimator', default='lss', choices=['lss', 'lsa'], help='beta series modeling method') proc_opts.add_argument('-sm',", "pipeline that contains ' 'minimally preprocessed img, brainmask, and confounds.tsv')", "False}, }) # running participant level if opts.analysis_level == \"participant\":", "same integer and there is a unique ' 'integer associated", "options misc = parser.add_argument_group('misc options') misc.add_argument('--graph', action='store_true', default=False, help='generates a", "some sort of versioning system bids_dir = os.path.abspath(opts.bids_dir) derivatives_pipeline_dir =", "atlas nifti where each voxel within a \"region\" ' 'is", "help='select a specific task to be processed') image_opts.add_argument('--run-label', action='store', default=None,", "(i.e. non-essential files). 
' 'This directory can be deleted once", "as e: if \"Workflow did not execute cleanly\" in str(e):", "'is labeled with the same integer and there is a", "task_label=opts.task_label, description_label=opts.description_label, work_dir=work_dir, ) if opts.graph: nibetaseries_participant_wf.write_graph(graph2use='colored', format='svg', simple_form=True) try:", "include separated by a space', nargs=\"+\") proc_opts.add_argument('--hrf-model', default='glover', choices=['glover', 'spm',", "labeled with the same integer and there is a unique", "action='store', default='MNI152NLin2009cAsym', choices=['MNI152NLin2009cAsym'], help='select a bold derivative in a specific", "False, 'maxtasksperchild': 1, } } # Resource management options #", "description_label=opts.description_label, work_dir=work_dir, ) if opts.graph: nibetaseries_participant_wf.write_graph(graph2use='colored', format='svg', simple_form=True) try: nibetaseries_participant_wf.run(**plugin_settings)", "of the' 'participant level analysis.') parser.add_argument('analysis_level', choices=['participant', 'group'], help='Level of", "is not None: from yaml import load as loadyml with", "look up table (tsv) formatted with the columns: ' 'index,", "help='generates a graph png of the workflow') return parser def", "== \"__main__\": raise RuntimeError(\"NiBetaSeries/cli/run.py should not be run directly;\\n\" \"Please", "``nibetaseries.__main__`` in ``sys.modules``. 
Also see (1) from http://click.pocoo.org/5/setuptools/#setuptools-integration \"\"\" from", "should be prepopulated with the results of the' 'participant level", "g_perfm.add_argument('--use-plugin', action='store', default=None, help='nipype plugin configuration file') # misc options", "' '`desc` label to process') image_opts.add_argument('--exclude-description-label', action='store_true', default=False, help='exclude this", "' 'FIR delays must be provided.') # Set up directories", "else: work_dir = os.path.join(os.getcwd(), 'nibetaseries_work') os.makedirs(work_dir, exist_ok=True) # only for", "preprocessing options proc_opts = parser.add_argument_group('Options for processing') proc_opts.add_argument('--estimator', default='lss', choices=['lss',", "does not include \"sub-\"). If this parameter is not '", "= opts.nthreads if nthreads is None or nthreads < 1:", "to analyze') image_opts.add_argument('-t', '--task-label', action='store', default=None, help='select a specific task", "main(): from ..workflows.base import init_nibetaseries_participant_wf # get commandline options opts", "+ derivative', 'glover + derivative + dispersion', 'spm + derivative',", "import glob from multiprocessing import cpu_count from nipype import config", "#!/usr/bin/env python # -*- coding: utf-8 -*- \"\"\" Module that", "will get executed again (as a module) because there's no", "= opts.participant_label # for all subjects else: subject_dirs = glob(os.path.join(bids_dir,", "misc options misc = parser.add_argument_group('misc options') misc.add_argument('--graph', action='store_true', default=False, help='generates", "(Hz)') proc_opts.add_argument('-c', '--confounds', help='The confound column names ' 'that are", "parser.add_argument_group('Options for selecting images') parser.add_argument('--participant-label', nargs=\"+\", help='The label(s) of the", "if (opts.hrf_model == 'fir') and (opts.fir_delays is None): raise ValueError('If", "- When you import __main__ it will get executed 
again", "help='The directory where the output directory ' 'and files should", "subjects else: subject_dirs = glob(os.path.join(bids_dir, \"sub-*\")) subject_list = [subject_dir.split(\"-\")[-1] for", "sys.argv or '--atlas-img' in sys.argv), help='atlas look up table (tsv)", "True}, 'execution': {'crashdump_dir': log_dir, 'crashfile_format': 'txt', 'parameterize_dirs': False}, }) #", "later, but that will cause problems: the code will get", "help='The directory with the input dataset ' 'formatted according to", "be included in nuisance regression. ' 'write the confounds you", "help='select a bold derivative in a specific space to be", "None: from yaml import load as loadyml with open(opts.use_plugin) as", "making strong assumptions about valid plugin args # This may", "atlas_img=os.path.abspath(opts.atlas_img), atlas_lut=os.path.abspath(opts.atlas_lut), bids_dir=bids_dir, derivatives_pipeline_dir=derivatives_pipeline_dir, exclude_description_label=opts.exclude_description_label, fir_delays=opts.fir_delays, hrf_model=opts.hrf_model, high_pass=opts.high_pass, output_dir=output_dir, run_label=opts.run_label,", "should be stored. If you are running group level analysis", "{'log_directory': log_dir, 'log_to_file': True}, 'execution': {'crashdump_dir': log_dir, 'crashfile_format': 'txt', 'parameterize_dirs':", "{ 'raise_insufficient': False, 'maxtasksperchild': 1, } } # Resource management", "# Image Selection options image_opts = parser.add_argument_group('Options for selecting images')", "help='maximum number of threads across all processes') g_perfm.add_argument('--use-plugin', action='store', default=None,", "the FIR HRF model is selected, ' 'FIR delays must", "`desc` label from nibetaseries') # performance options g_perfm = parser.add_argument_group('Options", "threads across all processes') g_perfm.add_argument('--use-plugin', action='store', default=None, help='nipype plugin configuration", "a module) because there's no ``nibetaseries.__main__`` in ``sys.modules``. 
Also see", "once you are reasonably ' 'certain nibs finished as expected.')", "The label ' 'corresponds to sub-<participant_label> from the BIDS spec", "specified by --atlas-img.') # preprocessing options proc_opts = parser.add_argument_group('Options for", "of the analysis that will be performed ' 'Multiple participant", "'parameterize_dirs': False}, }) # running participant level if opts.analysis_level ==", "functions') proc_opts.add_argument('--fir-delays', default=None, nargs='+', type=int, help='FIR delays in volumes', metavar='VOL')", "get commandline options opts = get_parser().parse_args() # check inputs if", "' 'minimally preprocessed img, brainmask, and confounds.tsv') parser.add_argument('output_dir', help='The directory", "__name__ == \"__main__\": raise RuntimeError(\"NiBetaSeries/cli/run.py should not be run directly;\\n\"", "help='beta series modeling method') proc_opts.add_argument('-sm', '--smoothing-kernel', action='store', type=float, default=6.0, help='select", "you are reasonably ' 'certain nibs finished as expected.') #", "arguments', formatter_class=RawTextHelpFormatter) parser.add_argument('bids_dir', help='The directory with the input dataset '", "(as a module) because there's no ``nibetaseries.__main__`` in ``sys.modules``. Also", "'corresponds to sub-<participant_label> from the BIDS spec ' '(so it", "include \"sub-\"). If this parameter is not ' 'provided all", "type=float, default=0.0078125, help='high pass filter (Hz)') proc_opts.add_argument('-c', '--confounds', help='The confound", "a session to analyze') image_opts.add_argument('-t', '--task-label', action='store', default=None, help='select a", "commandline options opts = get_parser().parse_args() # check inputs if (opts.hrf_model", "to be included in nuisance regression. ' 'write the confounds", "line app. 
Why does this file exist, and why not", "expected.') # Image Selection options image_opts = parser.add_argument_group('Options for selecting", "image_opts.add_argument('--run-label', action='store', default=None, help='select a run to analyze') image_opts.add_argument('-sp', '--space-label',", "\"\"\" Module that contains the command line app. Why does", "= os.path.abspath(opts.output_dir) os.makedirs(output_dir, exist_ok=True) log_dir = os.path.join(output_dir, 'logs') os.makedirs(log_dir, exist_ok=True)", "with the results of the' 'participant level analysis.') parser.add_argument('analysis_level', choices=['participant',", "standard.') parser.add_argument('derivatives_pipeline', help='The pipeline that contains ' 'minimally preprocessed img,", "choices=['participant', 'group'], help='Level of the analysis that will be performed", "If this parameter is not ' 'provided all subjects should", "'--atlas-lut', action='store', required=('-a' in sys.argv or '--atlas-img' in sys.argv), help='atlas", "specific space to be used') image_opts.add_argument('--description-label', action='store', default=None, help='select a", "'nibetaseries_work') os.makedirs(work_dir, exist_ok=True) # only for a subset of subjects", "which correspond to the regions in the ' 'nifti file", "handle performance') g_perfm.add_argument('--nthreads', '-n-cpus', action='store', type=int, help='maximum number of threads", "names ' 'that are to be included in nuisance regression.", "Permit overriding plugin config with specific CLI options if nthreads", "session to analyze') image_opts.add_argument('-t', '--task-label', action='store', default=None, help='select a specific", "version=verstr) # Atlas Arguments (Required Options) atlas_args = parser.add_argument_group('Required Atlas", "from ..workflows.base import init_nibetaseries_participant_wf # get commandline options opts =", "help='select a bold file with particular ' '`desc` label to", "should not be run directly;\\n\" \"Please `pip install` 
NiBetaSeries and", "(Required Options) atlas_args = parser.add_argument_group('Required Atlas Arguments') atlas_args.add_argument('-a', '--atlas-img', action='store',", "import cpu_count from nipype import config as ncfg def get_parser():", "nibetaseries` python will execute ``__main__.py`` as a script. That means", "selecting images') parser.add_argument('--participant-label', nargs=\"+\", help='The label(s) of the participant(s) '", "with particular ' '`desc` label to process') image_opts.add_argument('--exclude-description-label', action='store_true', default=False,", "' 'Multiple participant level analyses can be run independently '", "cause problems: the code will get executed twice: - When", "output directory ' 'and files should be stored. If you", "proc_opts.add_argument('-c', '--confounds', help='The confound column names ' 'that are to", "be processed') image_opts.add_argument('--run-label', action='store', default=None, help='select a run to analyze')", "region of interest.') atlas_args.add_argument('-l', '--atlas-lut', action='store', required=('-a' in sys.argv or", "if \"Workflow did not execute cleanly\" in str(e): print(\"Workflow did", "the same output_dir') parser.add_argument('-v', '--version', action='version', version=verstr) # Atlas Arguments", "(1) from http://click.pocoo.org/5/setuptools/#setuptools-integration \"\"\" from __future__ import absolute_import import os", "opts.nthreads is not None: nthreads = opts.nthreads if nthreads is", "ValueError('If the FIR HRF model is selected, ' 'FIR delays", "'--task-label', action='store', default=None, help='select a specific task to be processed')", "list.') image_opts.add_argument('--session-label', action='store', default=None, help='select a session to analyze') image_opts.add_argument('-t',", "' 'participants can be specified with a space separated list.')", "e elif opts.analysis_level == \"group\": raise NotImplementedError('group analysis not currently", "= parser.add_argument_group('Required 
Atlas Arguments') atlas_args.add_argument('-a', '--atlas-img', action='store', required=('-l' in sys.argv", "image_opts.add_argument('-sp', '--space-label', action='store', default='MNI152NLin2009cAsym', choices=['MNI152NLin2009cAsym'], help='select a bold derivative in", "opts.participant_label: subject_list = opts.participant_label # for all subjects else: subject_dirs", "BIDS spec ' '(so it does not include \"sub-\"). If", "to the BIDS standard.') parser.add_argument('derivatives_pipeline', help='The pipeline that contains '", "we're making strong assumptions about valid plugin args # This", "participant(s) ' 'that should be analyzed. The label ' 'corresponds", "you wish to include separated by a space', nargs=\"+\") proc_opts.add_argument('--hrf-model',", "from nipype import config as ncfg def get_parser(): \"\"\"Build parser", "command line app. Why does this file exist, and why", "analysis ' 'this folder should be prepopulated with the results", "= os.path.join(output_dir, 'logs') os.makedirs(log_dir, exist_ok=True) if opts.work_dir: work_dir = os.path.abspath(opts.work_dir)", "subject_list = opts.participant_label # for all subjects else: subject_dirs =", "analyze') image_opts.add_argument('-t', '--task-label', action='store', default=None, help='select a specific task to", "the BIDS standard.') parser.add_argument('derivatives_pipeline', help='The pipeline that contains ' 'minimally", "a graph png of the workflow') return parser def main():", "= parser.add_argument_group('misc options') misc.add_argument('--graph', action='store_true', default=False, help='generates a graph png", "bids_dir = os.path.abspath(opts.bids_dir) derivatives_pipeline_dir = os.path.join(bids_dir, 'derivatives', opts.derivatives_pipeline) output_dir =", "sub-<participant_label> from the BIDS spec ' '(so it does not", "there is a unique ' 'integer associated with each region", "options g_perfm = parser.add_argument_group('Options to handle performance') 
g_perfm.add_argument('--nthreads', '-n-cpus', action='store',", "non-essential files). ' 'This directory can be deleted once you", "help='high pass filter (Hz)') proc_opts.add_argument('-c', '--confounds', help='The confound column names", "not execute cleanly\" in str(e): print(\"Workflow did not execute cleanly\")", "confound column names ' 'that are to be included in", "each region of interest.') atlas_args.add_argument('-l', '--atlas-lut', action='store', required=('-a' in sys.argv", "subjects should be analyzed. Multiple ' 'participants can be specified", "get executed twice: - When you run `python -m nibetaseries`", "Atlas Arguments (Required Options) atlas_args = parser.add_argument_group('Required Atlas Arguments') atlas_args.add_argument('-a',", "derivative', 'spm + derivative + dispersion'], help='convolve your regressors '", "process') image_opts.add_argument('--exclude-description-label', action='store_true', default=False, help='exclude this `desc` label from nibetaseries')", "performance') g_perfm.add_argument('--nthreads', '-n-cpus', action='store', type=int, help='maximum number of threads across", "-m nibetaseries` python will execute ``__main__.py`` as a script. 
That", "if nthreads is None or opts.nthreads is not None: nthreads", "\"__main__\": raise RuntimeError(\"NiBetaSeries/cli/run.py should not be run directly;\\n\" \"Please `pip", "processes') g_perfm.add_argument('--use-plugin', action='store', default=None, help='nipype plugin configuration file') # misc", "parser.add_argument('derivatives_pipeline', help='The pipeline that contains ' 'minimally preprocessed img, brainmask,", "TODO: set up some sort of versioning system bids_dir =", "with specific CLI options if nthreads is None or opts.nthreads", "output_dir=output_dir, run_label=opts.run_label, selected_confounds=opts.confounds, session_label=opts.session_label, smoothing_kernel=opts.smoothing_kernel, space_label=opts.space_label, subject_list=subject_list, task_label=opts.task_label, description_label=opts.description_label, work_dir=work_dir,", "'certain nibs finished as expected.') # Image Selection options image_opts", "to the regions in the ' 'nifti file specified by", "' 'formatted according to the BIDS standard.') parser.add_argument('derivatives_pipeline', help='The pipeline", "one of the following hemodynamic response functions') proc_opts.add_argument('--fir-delays', default=None, nargs='+',", "Note that we're making strong assumptions about valid plugin args", "os.path.abspath(opts.bids_dir) derivatives_pipeline_dir = os.path.join(bids_dir, 'derivatives', opts.derivatives_pipeline) output_dir = os.path.abspath(opts.output_dir) os.makedirs(output_dir,", "help='FIR delays in volumes', metavar='VOL') proc_opts.add_argument('-w', '--work-dir', help='directory where temporary", "batch plugins nthreads = plugin_settings['plugin_args'].get('n_procs') # Permit overriding plugin config", "os.makedirs(log_dir, exist_ok=True) if opts.work_dir: work_dir = os.path.abspath(opts.work_dir) else: work_dir =", "be run directly;\\n\" \"Please `pip install` NiBetaSeries and use the", "g_perfm = parser.add_argument_group('Options to handle performance') 
g_perfm.add_argument('--nthreads', '-n-cpus', action='store', type=int,", "help='exclude this `desc` label from nibetaseries') # performance options g_perfm", "the code will get executed twice: - When you run", "'--smoothing-kernel', action='store', type=float, default=6.0, help='select a smoothing kernel (mm)') proc_opts.add_argument('-hp',", "run `python -m nibetaseries` python will execute ``__main__.py`` as a", "+ dispersion', 'spm + derivative', 'spm + derivative + dispersion'],", "directory can be deleted once you are reasonably ' 'certain", "opts = get_parser().parse_args() # check inputs if (opts.hrf_model == 'fir')", "if opts.work_dir: work_dir = os.path.abspath(opts.work_dir) else: work_dir = os.path.join(os.getcwd(), 'nibetaseries_work')", "coding: utf-8 -*- \"\"\" Module that contains the command line", "selected_confounds=opts.confounds, session_label=opts.session_label, smoothing_kernel=opts.smoothing_kernel, space_label=opts.space_label, subject_list=subject_list, task_label=opts.task_label, description_label=opts.description_label, work_dir=work_dir, ) if", "proc_opts.add_argument('-hp', '--high-pass', action='store', type=float, default=0.0078125, help='high pass filter (Hz)') proc_opts.add_argument('-c',", "Nipype plugin configuration # Load base plugin_settings from file if", "from file if --use-plugin if opts.use_plugin is not None: from", "for a subset of subjects if opts.participant_label: subject_list = opts.participant_label", "of the following hemodynamic response functions') proc_opts.add_argument('--fir-delays', default=None, nargs='+', type=int,", "python # -*- coding: utf-8 -*- \"\"\" Module that contains", "set up some sort of versioning system bids_dir = os.path.abspath(opts.bids_dir)", "execute cleanly\" in str(e): print(\"Workflow did not execute cleanly\") else:", "\"sub-\"). 
If this parameter is not ' 'provided all subjects", "separated by a space', nargs=\"+\") proc_opts.add_argument('--hrf-model', default='glover', choices=['glover', 'spm', 'fir',", "parallel) using the same output_dir') parser.add_argument('-v', '--version', action='version', version=verstr) #", "import argparse from argparse import RawTextHelpFormatter from glob import glob", "plugin configuration file') # misc options misc = parser.add_argument_group('misc options')", "in __main__? You might be tempted to import things from", "the columns: ' 'index, regions which correspond to the regions", "configuration # Load base plugin_settings from file if --use-plugin if", "subject_list = [subject_dir.split(\"-\")[-1] for subject_dir in subject_dirs] # Nipype plugin", "opts.use_plugin is not None: from yaml import load as loadyml", "'fir', 'glover + derivative', 'glover + derivative + dispersion', 'spm", "# misc options misc = parser.add_argument_group('misc options') misc.add_argument('--graph', action='store_true', default=False,", "valid plugin args # This may need to be revisited", "cleanly\") else: raise e elif opts.analysis_level == \"group\": raise NotImplementedError('group", "argparse from argparse import RawTextHelpFormatter from glob import glob from", "= glob(os.path.join(bids_dir, \"sub-*\")) subject_list = [subject_dir.split(\"-\")[-1] for subject_dir in subject_dirs]", "\"group\": raise NotImplementedError('group analysis not currently implemented') def init(): if", "help='The pipeline that contains ' 'minimally preprocessed img, brainmask, and", "images') parser.add_argument('--participant-label', nargs=\"+\", help='The label(s) of the participant(s) ' 'that", "# Note that we're making strong assumptions about valid plugin", "only for a subset of subjects if opts.participant_label: subject_list =", "glob(os.path.join(bids_dir, \"sub-*\")) subject_list = [subject_dir.split(\"-\")[-1] for subject_dir in subject_dirs] #", "same output_dir') 
parser.add_argument('-v', '--version', action='version', version=verstr) # Atlas Arguments (Required", "derivatives_pipeline_dir=derivatives_pipeline_dir, exclude_description_label=opts.exclude_description_label, fir_delays=opts.fir_delays, hrf_model=opts.hrf_model, high_pass=opts.high_pass, output_dir=output_dir, run_label=opts.run_label, selected_confounds=opts.confounds, session_label=opts.session_label, smoothing_kernel=opts.smoothing_kernel,", "def init(): if __name__ == \"__main__\": raise RuntimeError(\"NiBetaSeries/cli/run.py should not", "in the ' 'nifti file specified by --atlas-img.') # preprocessing", "level if opts.analysis_level == \"participant\": nibetaseries_participant_wf = init_nibetaseries_participant_wf( estimator=opts.estimator, atlas_img=os.path.abspath(opts.atlas_img),", "load as loadyml with open(opts.use_plugin) as f: plugin_settings = loadyml(f)", "e: if \"Workflow did not execute cleanly\" in str(e): print(\"Workflow", "file if --use-plugin if opts.use_plugin is not None: from yaml", "that contains ' 'minimally preprocessed img, brainmask, and confounds.tsv') parser.add_argument('output_dir',", "(logs and execution) ncfg.update_config({ 'logging': {'log_directory': log_dir, 'log_to_file': True}, 'execution':", "results of the' 'participant level analysis.') parser.add_argument('analysis_level', choices=['participant', 'group'], help='Level", "temporary files ' 'are stored (i.e. non-essential files). 
' 'This", "twice: - When you run `python -m nibetaseries` python will", "return parser def main(): from ..workflows.base import init_nibetaseries_participant_wf # get", "init_nibetaseries_participant_wf # get commandline options opts = get_parser().parse_args() # check", "check inputs if (opts.hrf_model == 'fir') and (opts.fir_delays is None):", "plugin_settings = loadyml(f) plugin_settings.setdefault('plugin_args', {}) else: # Defaults plugin_settings =", "'maxtasksperchild': 1, } } # Resource management options # Note", "cleanly\" in str(e): print(\"Workflow did not execute cleanly\") else: raise", "(opts.fir_delays is None): raise ValueError('If the FIR HRF model is", "revisited if people try to use batch plugins nthreads =", "and confounds.tsv') parser.add_argument('output_dir', help='The directory where the output directory '", "None): raise ValueError('If the FIR HRF model is selected, '", "\"Workflow did not execute cleanly\" in str(e): print(\"Workflow did not", "[subject_dir.split(\"-\")[-1] for subject_dir in subject_dirs] # Nipype plugin configuration #", "'integer associated with each region of interest.') atlas_args.add_argument('-l', '--atlas-lut', action='store',", "or nthreads < 1: nthreads = cpu_count() plugin_settings['plugin_args']['n_procs'] = nthreads", "nifti where each voxel within a \"region\" ' 'is labeled", "parser.add_argument('--participant-label', nargs=\"+\", help='The label(s) of the participant(s) ' 'that should", "nipype import config as ncfg def get_parser(): \"\"\"Build parser object\"\"\"", "or opts.nthreads is not None: nthreads = opts.nthreads if nthreads", "stored. 
If you are running group level analysis ' 'this", "required=('-l' in sys.argv or '--atlas-lut' in sys.argv), help='input atlas nifti", "' 'with one of the following hemodynamic response functions') proc_opts.add_argument('--fir-delays',", "with a space separated list.') image_opts.add_argument('--session-label', action='store', default=None, help='select a", "the output directory ' 'and files should be stored. If", "'nibs v{}'.format(__version__) parser = argparse.ArgumentParser(description='NiBetaSeries BIDS arguments', formatter_class=RawTextHelpFormatter) parser.add_argument('bids_dir', help='The", "table (tsv) formatted with the columns: ' 'index, regions which", "level analyses can be run independently ' '(in parallel) using", "code will get executed twice: - When you run `python", "modeling method') proc_opts.add_argument('-sm', '--smoothing-kernel', action='store', type=float, default=6.0, help='select a smoothing", "label(s) of the participant(s) ' 'that should be analyzed. The", "metavar='VOL') proc_opts.add_argument('-w', '--work-dir', help='directory where temporary files ' 'are stored", "particular ' '`desc` label to process') image_opts.add_argument('--exclude-description-label', action='store_true', default=False, help='exclude", "CLI options if nthreads is None or opts.nthreads is not", "proc_opts.add_argument('--hrf-model', default='glover', choices=['glover', 'spm', 'fir', 'glover + derivative', 'glover +", "smoothing kernel (mm)') proc_opts.add_argument('-hp', '--high-pass', action='store', type=float, default=0.0078125, help='high pass", "be analyzed. 
The label ' 'corresponds to sub-<participant_label> from the", "smoothing_kernel=opts.smoothing_kernel, space_label=opts.space_label, subject_list=subject_list, task_label=opts.task_label, description_label=opts.description_label, work_dir=work_dir, ) if opts.graph: nibetaseries_participant_wf.write_graph(graph2use='colored',", "raise ValueError('If the FIR HRF model is selected, ' 'FIR", "regions in the ' 'nifti file specified by --atlas-img.') #", "'logs') os.makedirs(log_dir, exist_ok=True) if opts.work_dir: work_dir = os.path.abspath(opts.work_dir) else: work_dir", "proc_opts.add_argument('--estimator', default='lss', choices=['lss', 'lsa'], help='beta series modeling method') proc_opts.add_argument('-sm', '--smoothing-kernel',", "analysis.') parser.add_argument('analysis_level', choices=['participant', 'group'], help='Level of the analysis that will", "__main__ it will get executed again (as a module) because", "proc_opts = parser.add_argument_group('Options for processing') proc_opts.add_argument('--estimator', default='lss', choices=['lss', 'lsa'], help='beta", "+ derivative + dispersion'], help='convolve your regressors ' 'with one", "no ``nibetaseries.__main__`` in ``sys.modules``. 
Also see (1) from http://click.pocoo.org/5/setuptools/#setuptools-integration \"\"\"", "'--version', action='version', version=verstr) # Atlas Arguments (Required Options) atlas_args =", "kernel (mm)') proc_opts.add_argument('-hp', '--high-pass', action='store', type=float, default=0.0078125, help='high pass filter", "This may need to be revisited if people try to", "help='input atlas nifti where each voxel within a \"region\" '", "if __name__ == \"__main__\": raise RuntimeError(\"NiBetaSeries/cli/run.py should not be run", "cpu_count() plugin_settings['plugin_args']['n_procs'] = nthreads # Nipype config (logs and execution)", "raise NotImplementedError('group analysis not currently implemented') def init(): if __name__", "processed') image_opts.add_argument('--run-label', action='store', default=None, help='select a run to analyze') image_opts.add_argument('-sp',", "atlas_args = parser.add_argument_group('Required Atlas Arguments') atlas_args.add_argument('-a', '--atlas-img', action='store', required=('-l' in", "derivative in a specific space to be used') image_opts.add_argument('--description-label', action='store',", "up table (tsv) formatted with the columns: ' 'index, regions", "sys verstr = 'nibs v{}'.format(__version__) parser = argparse.ArgumentParser(description='NiBetaSeries BIDS arguments',", "input dataset ' 'formatted according to the BIDS standard.') parser.add_argument('derivatives_pipeline',", "'This directory can be deleted once you are reasonably '", "not include \"sub-\"). 
If this parameter is not ' 'provided", "to process') image_opts.add_argument('--exclude-description-label', action='store_true', default=False, help='exclude this `desc` label from", "# Nipype plugin configuration # Load base plugin_settings from file", "--use-plugin if opts.use_plugin is not None: from yaml import load", "except RuntimeError as e: if \"Workflow did not execute cleanly\"", "performance options g_perfm = parser.add_argument_group('Options to handle performance') g_perfm.add_argument('--nthreads', '-n-cpus',", "hemodynamic response functions') proc_opts.add_argument('--fir-delays', default=None, nargs='+', type=int, help='FIR delays in", "be run independently ' '(in parallel) using the same output_dir')", "deleted once you are reasonably ' 'certain nibs finished as", "Multiple ' 'participants can be specified with a space separated", "preprocessed img, brainmask, and confounds.tsv') parser.add_argument('output_dir', help='The directory where the", "work_dir = os.path.join(os.getcwd(), 'nibetaseries_work') os.makedirs(work_dir, exist_ok=True) # only for a", "directory with the input dataset ' 'formatted according to the", "utf-8 -*- \"\"\" Module that contains the command line app.", "'nifti file specified by --atlas-img.') # preprocessing options proc_opts =", "Image Selection options image_opts = parser.add_argument_group('Options for selecting images') parser.add_argument('--participant-label',", "'lsa'], help='beta series modeling method') proc_opts.add_argument('-sm', '--smoothing-kernel', action='store', type=float, default=6.0,", "try to use batch plugins nthreads = plugin_settings['plugin_args'].get('n_procs') # Permit", "not put this in __main__? 
You might be tempted to", "atlas_args.add_argument('-l', '--atlas-lut', action='store', required=('-a' in sys.argv or '--atlas-img' in sys.argv),", "specific task to be processed') image_opts.add_argument('--run-label', action='store', default=None, help='select a", "is not None: nthreads = opts.nthreads if nthreads is None", "the participant(s) ' 'that should be analyzed. The label '", "confounds you wish to include separated by a space', nargs=\"+\")", "volumes', metavar='VOL') proc_opts.add_argument('-w', '--work-dir', help='directory where temporary files ' 'are", "using the same output_dir') parser.add_argument('-v', '--version', action='version', version=verstr) # Atlas", "executed again (as a module) because there's no ``nibetaseries.__main__`` in", "the same integer and there is a unique ' 'integer", "nthreads < 1: nthreads = cpu_count() plugin_settings['plugin_args']['n_procs'] = nthreads #", "'log_to_file': True}, 'execution': {'crashdump_dir': log_dir, 'crashfile_format': 'txt', 'parameterize_dirs': False}, })", "if opts.use_plugin is not None: from yaml import load as", "for selecting images') parser.add_argument('--participant-label', nargs=\"+\", help='The label(s) of the participant(s)", "where each voxel within a \"region\" ' 'is labeled with", "os.path.join(os.getcwd(), 'nibetaseries_work') os.makedirs(work_dir, exist_ok=True) # only for a subset of", "bold derivative in a specific space to be used') image_opts.add_argument('--description-label',", "options image_opts = parser.add_argument_group('Options for selecting images') parser.add_argument('--participant-label', nargs=\"+\", help='The", "a run to analyze') image_opts.add_argument('-sp', '--space-label', action='store', default='MNI152NLin2009cAsym', choices=['MNI152NLin2009cAsym'], help='select", "nibetaseries_participant_wf.run(**plugin_settings) except RuntimeError as e: if \"Workflow did not execute", "to sub-<participant_label> from the BIDS spec ' '(so it does", "regressors ' 'with one 
of the following hemodynamic response functions')", "HRF model is selected, ' 'FIR delays must be provided.')", "from nibetaseries') # performance options g_perfm = parser.add_argument_group('Options to handle", "can be deleted once you are reasonably ' 'certain nibs", "argparse.ArgumentParser(description='NiBetaSeries BIDS arguments', formatter_class=RawTextHelpFormatter) parser.add_argument('bids_dir', help='The directory with the input", "and (opts.fir_delays is None): raise ValueError('If the FIR HRF model", "'write the confounds you wish to include separated by a", "from multiprocessing import cpu_count from nipype import config as ncfg", "will be performed ' 'Multiple participant level analyses can be", "format='svg', simple_form=True) try: nibetaseries_participant_wf.run(**plugin_settings) except RuntimeError as e: if \"Workflow", "loadyml(f) plugin_settings.setdefault('plugin_args', {}) else: # Defaults plugin_settings = { 'plugin':", "action='store', type=int, help='maximum number of threads across all processes') g_perfm.add_argument('--use-plugin',", "again (as a module) because there's no ``nibetaseries.__main__`` in ``sys.modules``.", "from __future__ import absolute_import import os import argparse from argparse", "a unique ' 'integer associated with each region of interest.')", "you import __main__ it will get executed again (as a", "help='convolve your regressors ' 'with one of the following hemodynamic", "get_parser(): \"\"\"Build parser object\"\"\" from ..__init__ import __version__ import sys", "action='store', required=('-a' in sys.argv or '--atlas-img' in sys.argv), help='atlas look", "default=None, help='nipype plugin configuration file') # misc options misc =", "= get_parser().parse_args() # check inputs if (opts.hrf_model == 'fir') and", "is selected, ' 'FIR delays must be provided.') # Set", "'Multiple participant level analyses can be run independently ' '(in", "parser = argparse.ArgumentParser(description='NiBetaSeries BIDS arguments', 
formatter_class=RawTextHelpFormatter) parser.add_argument('bids_dir', help='The directory with", "config with specific CLI options if nthreads is None or", "fir_delays=opts.fir_delays, hrf_model=opts.hrf_model, high_pass=opts.high_pass, output_dir=output_dir, run_label=opts.run_label, selected_confounds=opts.confounds, session_label=opts.session_label, smoothing_kernel=opts.smoothing_kernel, space_label=opts.space_label, subject_list=subject_list,", "finished as expected.') # Image Selection options image_opts = parser.add_argument_group('Options", "loadyml with open(opts.use_plugin) as f: plugin_settings = loadyml(f) plugin_settings.setdefault('plugin_args', {})", "delays must be provided.') # Set up directories # TODO:", "glob from multiprocessing import cpu_count from nipype import config as", "== \"participant\": nibetaseries_participant_wf = init_nibetaseries_participant_wf( estimator=opts.estimator, atlas_img=os.path.abspath(opts.atlas_img), atlas_lut=os.path.abspath(opts.atlas_lut), bids_dir=bids_dir, derivatives_pipeline_dir=derivatives_pipeline_dir,", "high_pass=opts.high_pass, output_dir=output_dir, run_label=opts.run_label, selected_confounds=opts.confounds, session_label=opts.session_label, smoothing_kernel=opts.smoothing_kernel, space_label=opts.space_label, subject_list=subject_list, task_label=opts.task_label, description_label=opts.description_label,", "yaml import load as loadyml with open(opts.use_plugin) as f: plugin_settings", "from http://click.pocoo.org/5/setuptools/#setuptools-integration \"\"\" from __future__ import absolute_import import os import", "' 'and files should be stored. 
If you are running", "across all processes') g_perfm.add_argument('--use-plugin', action='store', default=None, help='nipype plugin configuration file')", "os.path.join(bids_dir, 'derivatives', opts.derivatives_pipeline) output_dir = os.path.abspath(opts.output_dir) os.makedirs(output_dir, exist_ok=True) log_dir =", "' 'this folder should be prepopulated with the results of", "' 'integer associated with each region of interest.') atlas_args.add_argument('-l', '--atlas-lut',", "should be analyzed. Multiple ' 'participants can be specified with", "execute ``__main__.py`` as a script. That means there won't be", "might be tempted to import things from __main__ later, but", "# for all subjects else: subject_dirs = glob(os.path.join(bids_dir, \"sub-*\")) subject_list", "``sys.modules``. - When you import __main__ it will get executed", "When you run `python -m nibetaseries` python will execute ``__main__.py``", "the BIDS spec ' '(so it does not include \"sub-\").", "from yaml import load as loadyml with open(opts.use_plugin) as f:", "nargs='+', type=int, help='FIR delays in volumes', metavar='VOL') proc_opts.add_argument('-w', '--work-dir', help='directory", "directories # TODO: set up some sort of versioning system", "means there won't be any ``nibetaseries.__main__`` in ``sys.modules``. - When", "your regressors ' 'with one of the following hemodynamic response", "' 'provided all subjects should be analyzed. Multiple ' 'participants", "there won't be any ``nibetaseries.__main__`` in ``sys.modules``. 
- When you", "= init_nibetaseries_participant_wf( estimator=opts.estimator, atlas_img=os.path.abspath(opts.atlas_img), atlas_lut=os.path.abspath(opts.atlas_lut), bids_dir=bids_dir, derivatives_pipeline_dir=derivatives_pipeline_dir, exclude_description_label=opts.exclude_description_label, fir_delays=opts.fir_delays, hrf_model=opts.hrf_model,", "simple_form=True) try: nibetaseries_participant_wf.run(**plugin_settings) except RuntimeError as e: if \"Workflow did", "(tsv) formatted with the columns: ' 'index, regions which correspond", "'this folder should be prepopulated with the results of the'", "file with particular ' '`desc` label to process') image_opts.add_argument('--exclude-description-label', action='store_true',", "BIDS standard.') parser.add_argument('derivatives_pipeline', help='The pipeline that contains ' 'minimally preprocessed", "' 'are stored (i.e. non-essential files). ' 'This directory can", "from ..__init__ import __version__ import sys verstr = 'nibs v{}'.format(__version__)", "participant level analyses can be run independently ' '(in parallel)", "image_opts.add_argument('-t', '--task-label', action='store', default=None, help='select a specific task to be", "for all subjects else: subject_dirs = glob(os.path.join(bids_dir, \"sub-*\")) subject_list =", "configuration file') # misc options misc = parser.add_argument_group('misc options') misc.add_argument('--graph',", "proc_opts.add_argument('-w', '--work-dir', help='directory where temporary files ' 'are stored (i.e.", "nibs finished as expected.') # Image Selection options image_opts =", "that will be performed ' 'Multiple participant level analyses can", "= os.path.join(bids_dir, 'derivatives', opts.derivatives_pipeline) output_dir = os.path.abspath(opts.output_dir) os.makedirs(output_dir, exist_ok=True) log_dir", "'group'], help='Level of the analysis that will be performed '", "nargs=\"+\") proc_opts.add_argument('--hrf-model', default='glover', choices=['glover', 'spm', 'fir', 'glover + 
derivative', 'glover", "bold file with particular ' '`desc` label to process') image_opts.add_argument('--exclude-description-label',", "'that are to be included in nuisance regression. ' 'write", "the following hemodynamic response functions') proc_opts.add_argument('--fir-delays', default=None, nargs='+', type=int, help='FIR", "default=None, help='select a run to analyze') image_opts.add_argument('-sp', '--space-label', action='store', default='MNI152NLin2009cAsym',", "to handle performance') g_perfm.add_argument('--nthreads', '-n-cpus', action='store', type=int, help='maximum number of", "# Resource management options # Note that we're making strong", "in ``sys.modules``. Also see (1) from http://click.pocoo.org/5/setuptools/#setuptools-integration \"\"\" from __future__", "are reasonably ' 'certain nibs finished as expected.') # Image", "from the BIDS spec ' '(so it does not include", "execute cleanly\") else: raise e elif opts.analysis_level == \"group\": raise", "each voxel within a \"region\" ' 'is labeled with the", "verstr = 'nibs v{}'.format(__version__) parser = argparse.ArgumentParser(description='NiBetaSeries BIDS arguments', formatter_class=RawTextHelpFormatter)", "to be processed') image_opts.add_argument('--run-label', action='store', default=None, help='select a run to", "You might be tempted to import things from __main__ later,", "series modeling method') proc_opts.add_argument('-sm', '--smoothing-kernel', action='store', type=float, default=6.0, help='select a", "a specific space to be used') image_opts.add_argument('--description-label', action='store', default=None, help='select", "with open(opts.use_plugin) as f: plugin_settings = loadyml(f) plugin_settings.setdefault('plugin_args', {}) else:", "in str(e): print(\"Workflow did not execute cleanly\") else: raise e", "with the input dataset ' 'formatted according to the BIDS", "output_dir') parser.add_argument('-v', '--version', action='version', version=verstr) # Atlas Arguments (Required 
Options)", "'glover + derivative', 'glover + derivative + dispersion', 'spm +", "analyze') image_opts.add_argument('-sp', '--space-label', action='store', default='MNI152NLin2009cAsym', choices=['MNI152NLin2009cAsym'], help='select a bold derivative", "None: nthreads = opts.nthreads if nthreads is None or nthreads", "# preprocessing options proc_opts = parser.add_argument_group('Options for processing') proc_opts.add_argument('--estimator', default='lss',", "put this in __main__? You might be tempted to import", "BIDS arguments', formatter_class=RawTextHelpFormatter) parser.add_argument('bids_dir', help='The directory with the input dataset", "to include separated by a space', nargs=\"+\") proc_opts.add_argument('--hrf-model', default='glover', choices=['glover',", "delays in volumes', metavar='VOL') proc_opts.add_argument('-w', '--work-dir', help='directory where temporary files", "analyzed. The label ' 'corresponds to sub-<participant_label> from the BIDS", "all processes') g_perfm.add_argument('--use-plugin', action='store', default=None, help='nipype plugin configuration file') #", "subject_list=subject_list, task_label=opts.task_label, description_label=opts.description_label, work_dir=work_dir, ) if opts.graph: nibetaseries_participant_wf.write_graph(graph2use='colored', format='svg', simple_form=True)", "voxel within a \"region\" ' 'is labeled with the same", "'--atlas-img', action='store', required=('-l' in sys.argv or '--atlas-lut' in sys.argv), help='input", "# check inputs if (opts.hrf_model == 'fir') and (opts.fir_delays is", "formatted with the columns: ' 'index, regions which correspond to", "be analyzed. Multiple ' 'participants can be specified with a", "it does not include \"sub-\"). 
If this parameter is not", "import sys verstr = 'nibs v{}'.format(__version__) parser = argparse.ArgumentParser(description='NiBetaSeries BIDS", "help='select a session to analyze') image_opts.add_argument('-t', '--task-label', action='store', default=None, help='select", "png of the workflow') return parser def main(): from ..workflows.base", "to be revisited if people try to use batch plugins", "nthreads = cpu_count() plugin_settings['plugin_args']['n_procs'] = nthreads # Nipype config (logs", "session_label=opts.session_label, smoothing_kernel=opts.smoothing_kernel, space_label=opts.space_label, subject_list=subject_list, task_label=opts.task_label, description_label=opts.description_label, work_dir=work_dir, ) if opts.graph:", "try: nibetaseries_participant_wf.run(**plugin_settings) except RuntimeError as e: if \"Workflow did not", "with each region of interest.') atlas_args.add_argument('-l', '--atlas-lut', action='store', required=('-a' in", "'index, regions which correspond to the regions in the '", "specified with a space separated list.') image_opts.add_argument('--session-label', action='store', default=None, help='select", "subject_dirs = glob(os.path.join(bids_dir, \"sub-*\")) subject_list = [subject_dir.split(\"-\")[-1] for subject_dir in", "parser.add_argument_group('misc options') misc.add_argument('--graph', action='store_true', default=False, help='generates a graph png of", "derivative', 'glover + derivative + dispersion', 'spm + derivative', 'spm", "dispersion'], help='convolve your regressors ' 'with one of the following", "group level analysis ' 'this folder should be prepopulated with", "help='The label(s) of the participant(s) ' 'that should be analyzed.", "Defaults plugin_settings = { 'plugin': 'MultiProc', 'plugin_args': { 'raise_insufficient': False,", "as loadyml with open(opts.use_plugin) as f: plugin_settings = loadyml(f) plugin_settings.setdefault('plugin_args',", "def get_parser(): \"\"\"Build parser object\"\"\" from ..__init__ 
import __version__ import", "'txt', 'parameterize_dirs': False}, }) # running participant level if opts.analysis_level", "' '(in parallel) using the same output_dir') parser.add_argument('-v', '--version', action='version',", "'participants can be specified with a space separated list.') image_opts.add_argument('--session-label',", "def main(): from ..workflows.base import init_nibetaseries_participant_wf # get commandline options", "default='lss', choices=['lss', 'lsa'], help='beta series modeling method') proc_opts.add_argument('-sm', '--smoothing-kernel', action='store',", "space_label=opts.space_label, subject_list=subject_list, task_label=opts.task_label, description_label=opts.description_label, work_dir=work_dir, ) if opts.graph: nibetaseries_participant_wf.write_graph(graph2use='colored', format='svg',", "nargs=\"+\", help='The label(s) of the participant(s) ' 'that should be", "plugin_settings from file if --use-plugin if opts.use_plugin is not None:", "exclude_description_label=opts.exclude_description_label, fir_delays=opts.fir_delays, hrf_model=opts.hrf_model, high_pass=opts.high_pass, output_dir=output_dir, run_label=opts.run_label, selected_confounds=opts.confounds, session_label=opts.session_label, smoothing_kernel=opts.smoothing_kernel, space_label=opts.space_label,", "Why does this file exist, and why not put this", "the ' 'nifti file specified by --atlas-img.') # preprocessing options", "contains the command line app. 
Why does this file exist,", "space separated list.') image_opts.add_argument('--session-label', action='store', default=None, help='select a session to", "image_opts.add_argument('--session-label', action='store', default=None, help='select a session to analyze') image_opts.add_argument('-t', '--task-label',", "if people try to use batch plugins nthreads = plugin_settings['plugin_args'].get('n_procs')", "'FIR delays must be provided.') # Set up directories #", "column names ' 'that are to be included in nuisance", "to analyze') image_opts.add_argument('-sp', '--space-label', action='store', default='MNI152NLin2009cAsym', choices=['MNI152NLin2009cAsym'], help='select a bold", "That means there won't be any ``nibetaseries.__main__`` in ``sys.modules``. -", "{'crashdump_dir': log_dir, 'crashfile_format': 'txt', 'parameterize_dirs': False}, }) # running participant", "' 'that are to be included in nuisance regression. '", "None or nthreads < 1: nthreads = cpu_count() plugin_settings['plugin_args']['n_procs'] =", "action='store', default=None, help='nipype plugin configuration file') # misc options misc", "does this file exist, and why not put this in", "\"Please `pip install` NiBetaSeries and use the `nibs` command\") init()", "because there's no ``nibetaseries.__main__`` in ``sys.modules``. Also see (1) from", "is not ' 'provided all subjects should be analyzed. 
Multiple", "graph png of the workflow') return parser def main(): from", "# TODO: set up some sort of versioning system bids_dir", "os.path.abspath(opts.output_dir) os.makedirs(output_dir, exist_ok=True) log_dir = os.path.join(output_dir, 'logs') os.makedirs(log_dir, exist_ok=True) if", "'raise_insufficient': False, 'maxtasksperchild': 1, } } # Resource management options", "be tempted to import things from __main__ later, but that", "g_perfm.add_argument('--nthreads', '-n-cpus', action='store', type=int, help='maximum number of threads across all", "Arguments (Required Options) atlas_args = parser.add_argument_group('Required Atlas Arguments') atlas_args.add_argument('-a', '--atlas-img',", "# Nipype config (logs and execution) ncfg.update_config({ 'logging': {'log_directory': log_dir,", "overriding plugin config with specific CLI options if nthreads is", "by --atlas-img.') # preprocessing options proc_opts = parser.add_argument_group('Options for processing')", "'-n-cpus', action='store', type=int, help='maximum number of threads across all processes')", "help='directory where temporary files ' 'are stored (i.e. non-essential files).", "this file exist, and why not put this in __main__?", "level analysis ' 'this folder should be prepopulated with the", "confounds.tsv') parser.add_argument('output_dir', help='The directory where the output directory ' 'and", "'--work-dir', help='directory where temporary files ' 'are stored (i.e. non-essential", "assumptions about valid plugin args # This may need to", "and why not put this in __main__? 
You might be", "exist_ok=True) if opts.work_dir: work_dir = os.path.abspath(opts.work_dir) else: work_dir = os.path.join(os.getcwd(),", "'plugin': 'MultiProc', 'plugin_args': { 'raise_insufficient': False, 'maxtasksperchild': 1, } }", "nibetaseries_participant_wf = init_nibetaseries_participant_wf( estimator=opts.estimator, atlas_img=os.path.abspath(opts.atlas_img), atlas_lut=os.path.abspath(opts.atlas_lut), bids_dir=bids_dir, derivatives_pipeline_dir=derivatives_pipeline_dir, exclude_description_label=opts.exclude_description_label, fir_delays=opts.fir_delays,", "in sys.argv or '--atlas-lut' in sys.argv), help='input atlas nifti where", "config (logs and execution) ncfg.update_config({ 'logging': {'log_directory': log_dir, 'log_to_file': True},", "``__main__.py`` as a script. That means there won't be any", "as f: plugin_settings = loadyml(f) plugin_settings.setdefault('plugin_args', {}) else: # Defaults", "plugin_settings = { 'plugin': 'MultiProc', 'plugin_args': { 'raise_insufficient': False, 'maxtasksperchild':", "plugin_settings['plugin_args'].get('n_procs') # Permit overriding plugin config with specific CLI options", "os.path.join(output_dir, 'logs') os.makedirs(log_dir, exist_ok=True) if opts.work_dir: work_dir = os.path.abspath(opts.work_dir) else:", "log_dir, 'crashfile_format': 'txt', 'parameterize_dirs': False}, }) # running participant level", "RuntimeError(\"NiBetaSeries/cli/run.py should not be run directly;\\n\" \"Please `pip install` NiBetaSeries", "' 'is labeled with the same integer and there is", "..__init__ import __version__ import sys verstr = 'nibs v{}'.format(__version__) parser", "tempted to import things from __main__ later, but that will", "proc_opts.add_argument('--fir-delays', default=None, nargs='+', type=int, help='FIR delays in volumes', metavar='VOL') proc_opts.add_argument('-w',", "opts.graph: nibetaseries_participant_wf.write_graph(graph2use='colored', format='svg', simple_form=True) try: 
nibetaseries_participant_wf.run(**plugin_settings) except RuntimeError as e:", "plugins nthreads = plugin_settings['plugin_args'].get('n_procs') # Permit overriding plugin config with" ]
[ "from .const import DOMAIN from .pysenz import PreAPI class OAuth2FlowHandler(", "extra_authorize_data(self) -> dict: \"\"\"Extra data that needs to be appended", "async def async_step_reauth( self, entry: dict[str, Any] | None =", "errors={}, ) persistent_notification.async_dismiss(self.hass, \"senz_reauth\") return await self.async_step_user() async def async_oauth_create_entry(self,", "-> dict: \"\"\"Create an oauth config entry or update existing", "existing_entry: self.hass.config_entries.async_update_entry(existing_entry, data=data) await self.hass.config_entries.async_reload(existing_entry.entry_id) return self.async_abort(reason=\"reauth_successful\") return self.async_create_entry(title=account, data=data)", "\"\"\"Perform reauth upon an API authentication error.\"\"\" self.entry = entry", "reauth.\"\"\" pre_api = PreAPI(self.hass) resp = await pre_api.getAccount(data[\"token\"][\"access_token\"]) account =", "from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const", "| None = None ) -> FlowResult: \"\"\"Perform reauth upon", "logger.\"\"\" return logging.getLogger(__name__) @property def extra_authorize_data(self) -> dict: \"\"\"Extra data", "persistent_notification.async_dismiss(self.hass, \"senz_reauth\") return await self.async_step_user() async def async_oauth_create_entry(self, data: dict)", "\"scope\": \"restapi offline_access\", } async def async_step_reauth( self, entry: dict[str,", "import voluptuous as vol from homeassistant.components import persistent_notification from homeassistant.data_entry_flow", "to re-configure it.\", \"Senz re-authentication\", \"senz_reauth\", ) return await self.async_step_reauth_confirm()", "user_input is None: return self.async_show_form( step_id=\"reauth_confirm\", description_placeholders={\"account\": self.entry[\"auth_implementation\"]}, data_schema=vol.Schema({}), errors={},", "class OAuth2FlowHandler( 
config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN ): \"\"\"Config flow to handle SENZ", "homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN from .pysenz import", "): \"\"\"Config flow to handle SENZ WiFi OAuth2 authentication.\"\"\" DOMAIN", "SENZ WiFi.\"\"\" from __future__ import annotations import logging from typing", "import Any import voluptuous as vol from homeassistant.components import persistent_notification", "go to the [integrations page](/config/integrations) to re-configure it.\", \"Senz re-authentication\",", "Any import voluptuous as vol from homeassistant.components import persistent_notification from", "domain=DOMAIN ): \"\"\"Config flow to handle SENZ WiFi OAuth2 authentication.\"\"\"", "= resp[\"userName\"] existing_entry = await self.async_set_unique_id(account) if existing_entry: self.hass.config_entries.async_update_entry(existing_entry, data=data)", "await self.async_set_unique_id(account) if existing_entry: self.hass.config_entries.async_update_entry(existing_entry, data=data) await self.hass.config_entries.async_reload(existing_entry.entry_id) return self.async_abort(reason=\"reauth_successful\")", "{ \"scope\": \"restapi offline_access\", } async def async_step_reauth( self, entry:", "async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None )", "OAuth2FlowHandler( config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN ): \"\"\"Config flow to handle SENZ WiFi", "DOMAIN from .pysenz import PreAPI class OAuth2FlowHandler( config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN ):", "async_step_reauth( self, entry: dict[str, Any] | None = None )", "re-authentication\", \"senz_reauth\", ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self,", "= None ) -> FlowResult: \"\"\"Dialog that informs the user", "= await self.async_set_unique_id(account) if existing_entry: self.hass.config_entries.async_update_entry(existing_entry, 
data=data) await self.hass.config_entries.async_reload(existing_entry.entry_id) return", "resp[\"userName\"] existing_entry = await self.async_set_unique_id(account) if existing_entry: self.hass.config_entries.async_update_entry(existing_entry, data=data) await", "persistent_notification from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers import config_entry_oauth2_flow from", "for account {entry['auth_implementation']} needs to be re-authenticated. Please go to", "\"senz_reauth\", ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input:", "annotations import logging from typing import Any import voluptuous as", "self.async_step_user() async def async_oauth_create_entry(self, data: dict) -> dict: \"\"\"Create an", "flow to handle SENZ WiFi OAuth2 authentication.\"\"\" DOMAIN = DOMAIN", "from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN from .pysenz", "that reauth is required.\"\"\" if user_input is None: return self.async_show_form(", "user_input: dict[str, Any] | None = None ) -> FlowResult:", "import DOMAIN from .pysenz import PreAPI class OAuth2FlowHandler( config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN", "@property def extra_authorize_data(self) -> dict: \"\"\"Extra data that needs to", "None: return self.async_show_form( step_id=\"reauth_confirm\", description_placeholders={\"account\": self.entry[\"auth_implementation\"]}, data_schema=vol.Schema({}), errors={}, ) persistent_notification.async_dismiss(self.hass,", "existing_entry = await self.async_set_unique_id(account) if existing_entry: self.hass.config_entries.async_update_entry(existing_entry, data=data) await self.hass.config_entries.async_reload(existing_entry.entry_id)", "an API authentication error.\"\"\" self.entry = entry persistent_notification.async_create( self.hass, f\"Senz", "= None ) -> FlowResult: \"\"\"Perform reauth upon an API", "await self.async_step_user() async 
def async_oauth_create_entry(self, data: dict) -> dict: \"\"\"Create", "dict: \"\"\"Extra data that needs to be appended to the", "WiFi.\"\"\" from __future__ import annotations import logging from typing import", "as vol from homeassistant.components import persistent_notification from homeassistant.data_entry_flow import FlowResult", "voluptuous as vol from homeassistant.components import persistent_notification from homeassistant.data_entry_flow import", "homeassistant.data_entry_flow import FlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import", "logger(self) -> logging.Logger: \"\"\"Return logger.\"\"\" return logging.getLogger(__name__) @property def extra_authorize_data(self)", "offline_access\", } async def async_step_reauth( self, entry: dict[str, Any] |", "update existing entry for reauth.\"\"\" pre_api = PreAPI(self.hass) resp =", "entry for reauth.\"\"\" pre_api = PreAPI(self.hass) resp = await pre_api.getAccount(data[\"token\"][\"access_token\"])", "API authentication error.\"\"\" self.entry = entry persistent_notification.async_create( self.hass, f\"Senz integration", "self.async_set_unique_id(account) if existing_entry: self.hass.config_entries.async_update_entry(existing_entry, data=data) await self.hass.config_entries.async_reload(existing_entry.entry_id) return self.async_abort(reason=\"reauth_successful\") return", "the authorize url.\"\"\" return { \"scope\": \"restapi offline_access\", } async", "error.\"\"\" self.entry = entry persistent_notification.async_create( self.hass, f\"Senz integration for account", "persistent_notification.async_create( self.hass, f\"Senz integration for account {entry['auth_implementation']} needs to be", "\"\"\"Extra data that needs to be appended to the authorize", "is None: return self.async_show_form( step_id=\"reauth_confirm\", description_placeholders={\"account\": self.entry[\"auth_implementation\"]}, data_schema=vol.Schema({}), errors={}, )", "be re-authenticated. 
Please go to the [integrations page](/config/integrations) to re-configure", "\"\"\"Dialog that informs the user that reauth is required.\"\"\" if", "OAuth2 authentication.\"\"\" DOMAIN = DOMAIN @property def logger(self) -> logging.Logger:", "Any] | None = None ) -> FlowResult: \"\"\"Dialog that", "user that reauth is required.\"\"\" if user_input is None: return", "DOMAIN @property def logger(self) -> logging.Logger: \"\"\"Return logger.\"\"\" return logging.getLogger(__name__)", "self.hass, f\"Senz integration for account {entry['auth_implementation']} needs to be re-authenticated.", "logging.getLogger(__name__) @property def extra_authorize_data(self) -> dict: \"\"\"Extra data that needs", "import config_entry_oauth2_flow from .const import DOMAIN from .pysenz import PreAPI", "self.entry = entry persistent_notification.async_create( self.hass, f\"Senz integration for account {entry['auth_implementation']}", "None ) -> FlowResult: \"\"\"Dialog that informs the user that", "appended to the authorize url.\"\"\" return { \"scope\": \"restapi offline_access\",", "to be re-authenticated. 
Please go to the [integrations page](/config/integrations) to", ") return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str,", "def extra_authorize_data(self) -> dict: \"\"\"Extra data that needs to be", "that informs the user that reauth is required.\"\"\" if user_input", "step_id=\"reauth_confirm\", description_placeholders={\"account\": self.entry[\"auth_implementation\"]}, data_schema=vol.Schema({}), errors={}, ) persistent_notification.async_dismiss(self.hass, \"senz_reauth\") return await", ".const import DOMAIN from .pysenz import PreAPI class OAuth2FlowHandler( config_entry_oauth2_flow.AbstractOAuth2FlowHandler,", "None = None ) -> FlowResult: \"\"\"Dialog that informs the", ".pysenz import PreAPI class OAuth2FlowHandler( config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN ): \"\"\"Config flow", "\"senz_reauth\") return await self.async_step_user() async def async_oauth_create_entry(self, data: dict) ->", "re-configure it.\", \"Senz re-authentication\", \"senz_reauth\", ) return await self.async_step_reauth_confirm() async", "dict[str, Any] | None = None ) -> FlowResult: \"\"\"Perform", "informs the user that reauth is required.\"\"\" if user_input is", "resp = await pre_api.getAccount(data[\"token\"][\"access_token\"]) account = resp[\"userName\"] existing_entry = await", "-> FlowResult: \"\"\"Dialog that informs the user that reauth is", "PreAPI(self.hass) resp = await pre_api.getAccount(data[\"token\"][\"access_token\"]) account = resp[\"userName\"] existing_entry =", "typing import Any import voluptuous as vol from homeassistant.components import", "\"\"\"Return logger.\"\"\" return logging.getLogger(__name__) @property def extra_authorize_data(self) -> dict: \"\"\"Extra", "logging.Logger: \"\"\"Return logger.\"\"\" return logging.getLogger(__name__) @property def extra_authorize_data(self) -> dict:", "Please go to the [integrations page](/config/integrations) to re-configure it.\", 
\"Senz", "async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None =", "<gh_stars>1-10 \"\"\"Config flow for SENZ WiFi.\"\"\" from __future__ import annotations", "config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN ): \"\"\"Config flow to handle SENZ WiFi OAuth2", "authentication.\"\"\" DOMAIN = DOMAIN @property def logger(self) -> logging.Logger: \"\"\"Return", "| None = None ) -> FlowResult: \"\"\"Dialog that informs", "oauth config entry or update existing entry for reauth.\"\"\" pre_api", "\"restapi offline_access\", } async def async_step_reauth( self, entry: dict[str, Any]", "SENZ WiFi OAuth2 authentication.\"\"\" DOMAIN = DOMAIN @property def logger(self)", "return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any]", "self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None", "Any] | None = None ) -> FlowResult: \"\"\"Perform reauth", "async_oauth_create_entry(self, data: dict) -> dict: \"\"\"Create an oauth config entry", "authorize url.\"\"\" return { \"scope\": \"restapi offline_access\", } async def", "None ) -> FlowResult: \"\"\"Perform reauth upon an API authentication", ") -> FlowResult: \"\"\"Perform reauth upon an API authentication error.\"\"\"", "if user_input is None: return self.async_show_form( step_id=\"reauth_confirm\", description_placeholders={\"account\": self.entry[\"auth_implementation\"]}, data_schema=vol.Schema({}),", "= await pre_api.getAccount(data[\"token\"][\"access_token\"]) account = resp[\"userName\"] existing_entry = await self.async_set_unique_id(account)", "import logging from typing import Any import voluptuous as vol", "if existing_entry: self.hass.config_entries.async_update_entry(existing_entry, data=data) await self.hass.config_entries.async_reload(existing_entry.entry_id) return self.async_abort(reason=\"reauth_successful\") return self.async_create_entry(title=account,", 
"PreAPI class OAuth2FlowHandler( config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN ): \"\"\"Config flow to handle", "reauth upon an API authentication error.\"\"\" self.entry = entry persistent_notification.async_create(", "{entry['auth_implementation']} needs to be re-authenticated. Please go to the [integrations", "None = None ) -> FlowResult: \"\"\"Perform reauth upon an", "import persistent_notification from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers import config_entry_oauth2_flow", "integration for account {entry['auth_implementation']} needs to be re-authenticated. Please go", "config entry or update existing entry for reauth.\"\"\" pre_api =", "the user that reauth is required.\"\"\" if user_input is None:", "async def async_oauth_create_entry(self, data: dict) -> dict: \"\"\"Create an oauth", "self, user_input: dict[str, Any] | None = None ) ->", "url.\"\"\" return { \"scope\": \"restapi offline_access\", } async def async_step_reauth(", "DOMAIN = DOMAIN @property def logger(self) -> logging.Logger: \"\"\"Return logger.\"\"\"", "entry or update existing entry for reauth.\"\"\" pre_api = PreAPI(self.hass)", "upon an API authentication error.\"\"\" self.entry = entry persistent_notification.async_create( self.hass,", "for reauth.\"\"\" pre_api = PreAPI(self.hass) resp = await pre_api.getAccount(data[\"token\"][\"access_token\"]) account", "dict[str, Any] | None = None ) -> FlowResult: \"\"\"Dialog", "reauth is required.\"\"\" if user_input is None: return self.async_show_form( step_id=\"reauth_confirm\",", "pre_api.getAccount(data[\"token\"][\"access_token\"]) account = resp[\"userName\"] existing_entry = await self.async_set_unique_id(account) if existing_entry:", "import PreAPI class OAuth2FlowHandler( config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN ): \"\"\"Config flow to", "existing entry for reauth.\"\"\" pre_api = PreAPI(self.hass) resp = await", "= DOMAIN @property def logger(self) -> 
logging.Logger: \"\"\"Return logger.\"\"\" return", "needs to be appended to the authorize url.\"\"\" return {", "-> logging.Logger: \"\"\"Return logger.\"\"\" return logging.getLogger(__name__) @property def extra_authorize_data(self) ->", "return logging.getLogger(__name__) @property def extra_authorize_data(self) -> dict: \"\"\"Extra data that", "\"Senz re-authentication\", \"senz_reauth\", ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm(", "required.\"\"\" if user_input is None: return self.async_show_form( step_id=\"reauth_confirm\", description_placeholders={\"account\": self.entry[\"auth_implementation\"]},", ") persistent_notification.async_dismiss(self.hass, \"senz_reauth\") return await self.async_step_user() async def async_oauth_create_entry(self, data:", "or update existing entry for reauth.\"\"\" pre_api = PreAPI(self.hass) resp", "re-authenticated. Please go to the [integrations page](/config/integrations) to re-configure it.\",", "= PreAPI(self.hass) resp = await pre_api.getAccount(data[\"token\"][\"access_token\"]) account = resp[\"userName\"] existing_entry", "f\"Senz integration for account {entry['auth_implementation']} needs to be re-authenticated. 
Please", "is required.\"\"\" if user_input is None: return self.async_show_form( step_id=\"reauth_confirm\", description_placeholders={\"account\":", "pre_api = PreAPI(self.hass) resp = await pre_api.getAccount(data[\"token\"][\"access_token\"]) account = resp[\"userName\"]", "FlowResult: \"\"\"Dialog that informs the user that reauth is required.\"\"\"", "self, entry: dict[str, Any] | None = None ) ->", "it.\", \"Senz re-authentication\", \"senz_reauth\", ) return await self.async_step_reauth_confirm() async def", "data: dict) -> dict: \"\"\"Create an oauth config entry or", "\"\"\"Config flow for SENZ WiFi.\"\"\" from __future__ import annotations import", "} async def async_step_reauth( self, entry: dict[str, Any] | None", "dict) -> dict: \"\"\"Create an oauth config entry or update", "handle SENZ WiFi OAuth2 authentication.\"\"\" DOMAIN = DOMAIN @property def", "def async_step_reauth( self, entry: dict[str, Any] | None = None", "to the authorize url.\"\"\" return { \"scope\": \"restapi offline_access\", }", "entry: dict[str, Any] | None = None ) -> FlowResult:", "-> FlowResult: \"\"\"Perform reauth upon an API authentication error.\"\"\" self.entry", "return await self.async_step_user() async def async_oauth_create_entry(self, data: dict) -> dict:", "self.async_show_form( step_id=\"reauth_confirm\", description_placeholders={\"account\": self.entry[\"auth_implementation\"]}, data_schema=vol.Schema({}), errors={}, ) persistent_notification.async_dismiss(self.hass, \"senz_reauth\") return", ") -> FlowResult: \"\"\"Dialog that informs the user that reauth", "account = resp[\"userName\"] existing_entry = await self.async_set_unique_id(account) if existing_entry: self.hass.config_entries.async_update_entry(existing_entry,", "to the [integrations page](/config/integrations) to re-configure it.\", \"Senz re-authentication\", \"senz_reauth\",", "that needs to be appended to the authorize url.\"\"\" return", "logging from typing import Any import voluptuous as vol 
from", "from homeassistant.components import persistent_notification from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers", "the [integrations page](/config/integrations) to re-configure it.\", \"Senz re-authentication\", \"senz_reauth\", )", "await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] |", "self.entry[\"auth_implementation\"]}, data_schema=vol.Schema({}), errors={}, ) persistent_notification.async_dismiss(self.hass, \"senz_reauth\") return await self.async_step_user() async", "dict: \"\"\"Create an oauth config entry or update existing entry", "page](/config/integrations) to re-configure it.\", \"Senz re-authentication\", \"senz_reauth\", ) return await", "-> dict: \"\"\"Extra data that needs to be appended to", "__future__ import annotations import logging from typing import Any import", "to handle SENZ WiFi OAuth2 authentication.\"\"\" DOMAIN = DOMAIN @property", "flow for SENZ WiFi.\"\"\" from __future__ import annotations import logging", "return { \"scope\": \"restapi offline_access\", } async def async_step_reauth( self,", "account {entry['auth_implementation']} needs to be re-authenticated. 
Please go to the", "[integrations page](/config/integrations) to re-configure it.\", \"Senz re-authentication\", \"senz_reauth\", ) return", "from __future__ import annotations import logging from typing import Any", "FlowResult: \"\"\"Perform reauth upon an API authentication error.\"\"\" self.entry =", "\"\"\"Create an oauth config entry or update existing entry for", "await pre_api.getAccount(data[\"token\"][\"access_token\"]) account = resp[\"userName\"] existing_entry = await self.async_set_unique_id(account) if", "@property def logger(self) -> logging.Logger: \"\"\"Return logger.\"\"\" return logging.getLogger(__name__) @property", "= entry persistent_notification.async_create( self.hass, f\"Senz integration for account {entry['auth_implementation']} needs", "be appended to the authorize url.\"\"\" return { \"scope\": \"restapi", "description_placeholders={\"account\": self.entry[\"auth_implementation\"]}, data_schema=vol.Schema({}), errors={}, ) persistent_notification.async_dismiss(self.hass, \"senz_reauth\") return await self.async_step_user()", "data_schema=vol.Schema({}), errors={}, ) persistent_notification.async_dismiss(self.hass, \"senz_reauth\") return await self.async_step_user() async def", "from typing import Any import voluptuous as vol from homeassistant.components", "def logger(self) -> logging.Logger: \"\"\"Return logger.\"\"\" return logging.getLogger(__name__) @property def", "data that needs to be appended to the authorize url.\"\"\"", "authentication error.\"\"\" self.entry = entry persistent_notification.async_create( self.hass, f\"Senz integration for", "def async_oauth_create_entry(self, data: dict) -> dict: \"\"\"Create an oauth config", "for SENZ WiFi.\"\"\" from __future__ import annotations import logging from", "config_entry_oauth2_flow from .const import DOMAIN from .pysenz import PreAPI class", "import FlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN", "def async_step_reauth_confirm( 
self, user_input: dict[str, Any] | None = None", "to be appended to the authorize url.\"\"\" return { \"scope\":", "return self.async_show_form( step_id=\"reauth_confirm\", description_placeholders={\"account\": self.entry[\"auth_implementation\"]}, data_schema=vol.Schema({}), errors={}, ) persistent_notification.async_dismiss(self.hass, \"senz_reauth\")", "vol from homeassistant.components import persistent_notification from homeassistant.data_entry_flow import FlowResult from", "homeassistant.components import persistent_notification from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers import", "needs to be re-authenticated. Please go to the [integrations page](/config/integrations)", "\"\"\"Config flow to handle SENZ WiFi OAuth2 authentication.\"\"\" DOMAIN =", "FlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN from", "WiFi OAuth2 authentication.\"\"\" DOMAIN = DOMAIN @property def logger(self) ->", "entry persistent_notification.async_create( self.hass, f\"Senz integration for account {entry['auth_implementation']} needs to", "an oauth config entry or update existing entry for reauth.\"\"\"", "from .pysenz import PreAPI class OAuth2FlowHandler( config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN ): \"\"\"Config", "import annotations import logging from typing import Any import voluptuous" ]
[ "True, the full SHA1 hash will be returned. Otherwise, the", "path = os.getcwd() if not os.path.isdir(path): path = os.path.abspath(os.path.dirname(path)) if", "as e: if show_warning: warnings.warn('Error running git: ' + str(e))", "' + str(e)) return (None, b'', b'') if p.returncode ==", "show_warning: warnings.warn('No git repository present at {0!r}! Using ' 'default", "subprocess.Popen(['git'] + cmd, cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) stdout, stderr =", "a filename it uses the directory containing that file. Returns", "= stream.decode('latin1') return text def update_git_devstr(version, path=None): \"\"\" Updates the", "of the git repository this path is under. If given,", "in this repository. Parameters ---------- sha : bool If True,", "creating a fake package, so it doesn't get picked up", "file. Returns ------- devversion : str Either a string with", "copy. This ensures that the revision number in the version", "' 'count: {0}'.format(_decode_stdio(stderr))) return (p.returncode, stdout, stderr) return p.returncode, stdout,", "'HEAD'] else: cmd = ['rev-list', '--count', 'HEAD'] def run_git(cmd): try:", "129: if show_warning: warnings.warn('Your git looks old (does it support", "than the full hash) cmd = ['rev-list', '--abbrev-commit', '--abbrev=0', 'HEAD']", "Parameters ---------- sha : bool If True, the full SHA1", "# return the default dev version of '0'. return '0'", "root of the git repository this path is under. If", "<reponame>bsipocz/astropy-helpers # Licensed under a 3-clause BSD style license -", "if not os.path.isdir(path): path = os.path.abspath(os.path.dirname(path)) if sha: # Faster", "SHA1 hash will be returned. 
Otherwise, the total count of", "only ever executed within a subprocess when # creating a", "could not be determined to belong to a git repo.", "git revision string if and only if the path is", "b'') if p.returncode == 128: if show_warning: warnings.warn('No git repository", "we # return the default dev version of '0'. return", "happen in git versions older than 1.7.2 # where the", "Returns ------- devversion : str Either a string with the", "the lines if returncode == 0: return str(stdout.count(b'\\n')) else: return", "# astropy_helpers.version_helpers to determine the beginning of the code in", "# Licensed under a 3-clause BSD style license - see", "way to determine if we're in git or not -", "if show_warning: warnings.warn('Git failed while determining revision ' 'count: {0}'.format(_decode_stdio(stderr)))", "returncode == 129: # git returns 129 if a command", "this path is under. If given, this won't look any", "revision number in the version string is accurate. \"\"\" try:", "warnings.warn('Error running git: ' + str(e)) return (None, b'', b'')", "p.returncode != 0: if show_warning: warnings.warn('Git failed while determining revision", "and counting # the lines if returncode == 0: return", "then the given path must be the root of the", "to belong to a git repo. \"\"\" if os.path.isfile(pathname): current_dir", "# git returns 128 if the command is not run", "\"\"\" Utilities for retrieving revision information from a project's git", "(rather than the full hash) cmd = ['rev-list', '--abbrev-commit', '--abbrev=0',", "the directory to look in to find the git repository.", "stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) stdout, stderr = p.communicate() except OSError as", "warning if git returns an error code, otherwise errors pass", "of commits in the repository will be used as a", "filename it uses the directory containing that file. 
Returns -------", "look any higher than ``levels`` (that is, if ``levels=0`` then", "'HEAD'] def run_git(cmd): try: p = subprocess.Popen(['git'] + cmd, cwd=path,", "could not be identified. \"\"\" if path is None: path", "but we # return the default dev version of '0'.", "where the --count option is not supported # Also use", "git or not - returns '' if not devstr =", "if show_warning: warnings.warn('Error running git: ' + str(e)) return (None,", "directory containing that file. Returns ------- devversion : str Either", "imported directly from a git working copy. This ensures that", "sha: return _decode_stdio(stdout)[:40] else: return _decode_stdio(stdout).strip() # This function is", "if git version info could not be identified. \"\"\" if", "def _decode_stdio(stream): try: stdio_encoding = locale.getdefaultlocale()[1] or 'utf-8' except ValueError:", "path=None): \"\"\" Determines the number of revisions in this repository.", "must be the root of the git repository and is", "{0!r}! Using ' 'default dev version.'.format(path)) return (p.returncode, b'', b'')", "and only if the path is being imported directly from", "the old method of getting all revisions and counting #", "be the root of the git repository and is returned", "not supported # Also use --abbrev-commit and --abbrev=0 to display", "\"revision number\". show_warning : bool If True, issue a warning", "not devstr: # Probably not in git so just pass", "could happen in git versions older than 1.7.2 # where", "(does it support {0}?); ' 'consider upgrading to v1.7.2 or", "a project's git repository. \"\"\" # Do not remove the", "= run_git(cmd) if not sha and returncode == 128: #", "{0}'.format(_decode_stdio(stderr))) return (p.returncode, stdout, stderr) return p.returncode, stdout, stderr returncode,", "the git revision string if and only if the path", "def update_git_devstr(version, path=None): \"\"\" Updates the git revision string if", "is tested but it is only ever executed within a", "# repository tree. 
In this case, a warning is produced", "or directory name, determine the root of the git repository", "_decode_stdio(stream): try: stdio_encoding = locale.getdefaultlocale()[1] or 'utf-8' except ValueError: stdio_encoding", "the directory containing that file. Returns ------- devversion : str", "is produced above but we # return the default dev", "root of the git repository. If given a filename it", "the given path could not be determined to belong to", "not sha and returncode == 128: # git returns 128", "by coverage metrics. def _get_repo_path(pathname, levels=None): # pragma: no cover", "try: stdio_encoding = locale.getdefaultlocale()[1] or 'utf-8' except ValueError: stdio_encoding =", "git version info could not be identified. \"\"\" if path", "returncode, stdout, stderr = run_git(cmd) # Fall back on the", "return version def get_git_devstr(sha=False, show_warning=True, path=None): \"\"\" Determines the number", "elif os.path.isdir(pathname): current_dir = os.path.abspath(pathname) else: return None current_level =", "= 0 while levels is None or current_level <= levels:", "--abbrev-commit and --abbrev=0 to display the minimum # number of", "Either a string with the revision number (if `sha` is", "if ``levels=0`` then the given path must be the root", "errors pass silently. path : str or None If a", "not be identified. \"\"\" if path is None: path =", "accurate. \"\"\" try: # Quick way to determine if we're", "as a \"revision number\". show_warning : bool If True, issue", "version.'.format(path)) return (p.returncode, b'', b'') if p.returncode == 129: if", "it is used by # astropy_helpers.version_helpers to determine the beginning", "repository tree. In this case, a warning is produced above", "Fall back on the old method of getting all revisions", "LICENSE.rst \"\"\" Utilities for retrieving revision information from a project's", "repository. 
If `None`, the current working directory is used, and", "cmd = ['rev-list', '--count', 'HEAD'] def run_git(cmd): try: p =", "this could happen in git versions older than 1.7.2 #", "of getting all revisions and counting # the lines if", "git repo. \"\"\" if os.path.isfile(pathname): current_dir = os.path.abspath(os.path.dirname(pathname)) elif os.path.isdir(pathname):", "at {0!r}! Using ' 'default dev version.'.format(path)) return (p.returncode, b'',", "package, so it doesn't get picked up by coverage metrics.", "a command option failed to parse; in # particular this", "'.git')): return current_dir current_level += 1 if current_dir == os.path.dirname(current_dir):", "version if 'dev' in version: # update to the current", "of the git repository. If given a filename it uses", "None If a string, specifies the directory to look in", "git so just pass silently return version if 'dev' in", "the git repository. If given a filename it uses the", "return version if not devstr: # Probably not in git", "if the command is not run from within a git", "str(e)) return (None, b'', b'') if p.returncode == 128: if", "returned if so. Returns `None` if the given path could", "version of '0'. return '0' elif not sha and returncode", "git versions older than 1.7.2 # where the --count option", "current_dir current_level += 1 if current_dir == os.path.dirname(current_dir): break current_dir", "the beginning of the code in # this module #", "is not supported # Also use --abbrev-commit and --abbrev=0 to", "sha and returncode == 128: # git returns 128 if", "text = stream.decode(stdio_encoding) except UnicodeDecodeError: # Final fallback text =", "warnings.warn('No git repository present at {0!r}! 
Using ' 'default dev", "determine the beginning of the code in # this module", "the full hash) cmd = ['rev-list', '--abbrev-commit', '--abbrev=0', 'HEAD'] returncode,", "else: return None current_level = 0 while levels is None", "return (p.returncode, stdout, stderr) return p.returncode, stdout, stderr returncode, stdout,", "--abbrev=0 to display the minimum # number of characters needed", "cmd, cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) stdout, stderr = p.communicate() except", "+= 1 if current_dir == os.path.dirname(current_dir): break current_dir = os.path.dirname(current_dir)", "elif not sha and returncode == 129: # git returns", "method of getting all revisions and counting # the lines", "version string is accurate. \"\"\" try: # Quick way to", "get_git_devstr(sha=True, show_warning=False, path=path) except OSError: return version if not devstr:", "to display the minimum # number of characters needed per-commit", "v1.7.2 or ' 'later.'.format(cmd[0])) return (p.returncode, stdout, stderr) elif p.returncode", "current working directory is used, and must be the root", "129 if a command option failed to parse; in #", "git repository present at {0!r}! Using ' 'default dev version.'.format(path))", "hash will be returned. Otherwise, the total count of commits", "returncode == 0: return str(stdout.count(b'\\n')) else: return '' elif sha:", "subprocess when # creating a fake package, so it doesn't", "if returncode == 0: return str(stdout.count(b'\\n')) else: return '' elif", "\"\"\" if path is None: path = os.getcwd() if not", "in git or not - returns '' if not devstr", "or current_level <= levels: if os.path.exists(os.path.join(current_dir, '.git')): return current_dir current_level", "git repository and is returned if so. 
Returns `None` if", "following comment; it is used by # astropy_helpers.version_helpers to determine", "str Either a string with the revision number (if `sha`", "== 129: if show_warning: warnings.warn('Your git looks old (does it", "look in to find the git repository. If `None`, the", "(p.returncode, stdout, stderr) elif p.returncode != 0: if show_warning: warnings.warn('Git", "+ str(e)) return (None, b'', b'') if p.returncode == 128:", "== 128: if show_warning: warnings.warn('No git repository present at {0!r}!", "git returns 128 if the command is not run from", "of HEAD cmd = ['rev-parse', 'HEAD'] else: cmd = ['rev-list',", "command option failed to parse; in # particular this could", "(if `sha` is False), the SHA1 hash of the current", "in version: # update to the current git revision version_base", "git repository. If given a filename it uses the directory", "failed while determining revision ' 'count: {0}'.format(_decode_stdio(stderr))) return (p.returncode, stdout,", "\"\"\" Updates the git revision string if and only if", "os.path.isdir(pathname): current_dir = os.path.abspath(pathname) else: return None current_level = 0", "else: # otherwise it's already the true/release version return version", "the code in # this module # BEGIN import locale", "= os.getcwd() if not os.path.isdir(path): path = os.path.abspath(os.path.dirname(path)) if sha:", "git returns 129 if a command option failed to parse;", "Licensed under a 3-clause BSD style license - see LICENSE.rst", "if and only if the path is being imported directly", "'utf-8' try: text = stream.decode(stdio_encoding) except UnicodeDecodeError: # Final fallback", "any higher than ``levels`` (that is, if ``levels=0`` then the", "already the true/release version return version def get_git_devstr(sha=False, show_warning=True, path=None):", "otherwise it's already the true/release version return version def get_git_devstr(sha=False,", "0 while levels is None or current_level <= levels: if", "return '0' elif not sha 
and returncode == 129: #", "see LICENSE.rst \"\"\" Utilities for retrieving revision information from a", "# creating a fake package, so it doesn't get picked", "p = subprocess.Popen(['git'] + cmd, cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) stdout,", "to find the git repository. If `None`, the current working", "determine the root of the git repository this path is", "given, this won't look any higher than ``levels`` (that is,", "devversion : str Either a string with the revision number", "is, if ``levels=0`` then the given path must be the", "per-commit (rather than the full hash) cmd = ['rev-list', '--abbrev-commit',", "elif p.returncode != 0: if show_warning: warnings.warn('Git failed while determining", "update to the current git revision version_base = version.split('.dev', 1)[0]", "the root of the git repository. If given a filename", "if the given path could not be determined to belong", "a git # repository tree. In this case, a warning", "return p.returncode, stdout, stderr returncode, stdout, stderr = run_git(cmd) if", "be returned. Otherwise, the total count of commits in the", "determining revision ' 'count: {0}'.format(_decode_stdio(stderr))) return (p.returncode, stdout, stderr) return", "root of the git repository and is returned if so.", "' 'consider upgrading to v1.7.2 or ' 'later.'.format(cmd[0])) return (p.returncode,", "when # creating a fake package, so it doesn't get", "the repository will be used as a \"revision number\". show_warning", "# This function is tested but it is only ever", "= p.communicate() except OSError as e: if show_warning: warnings.warn('Error running", "repository will be used as a \"revision number\". show_warning :", "given path could not be determined to belong to a", "within a git # repository tree. In this case, a", "of '0'. 
return '0' elif not sha and returncode ==", "just pass silently return version if 'dev' in version: #", "except ValueError: stdio_encoding = 'utf-8' try: text = stream.decode(stdio_encoding) except", "determined to belong to a git repo. \"\"\" if os.path.isfile(pathname):", "= subprocess.Popen(['git'] + cmd, cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) stdout, stderr", "running git: ' + str(e)) return (None, b'', b'') if", "+ devstr else: # otherwise it's already the true/release version", "stdout, stderr returncode, stdout, stderr = run_git(cmd) if not sha", "current_dir = os.path.abspath(os.path.dirname(pathname)) elif os.path.isdir(pathname): current_dir = os.path.abspath(pathname) else: return", "case, a warning is produced above but we # return", "belong to a git repo. \"\"\" if os.path.isfile(pathname): current_dir =", "under a 3-clause BSD style license - see LICENSE.rst \"\"\"", "full SHA1 hash will be returned. Otherwise, the total count", "# Faster for getting just the hash of HEAD cmd", "be the root of the git repository. If given a", "higher than ``levels`` (that is, if ``levels=0`` then the given", "pass silently return version if 'dev' in version: # update", "text = stream.decode('latin1') return text def update_git_devstr(version, path=None): \"\"\" Updates", "an empty string if git version info could not be", "is used, and must be the root of the git", "given a filename it uses the directory containing that file.", "a warning if git returns an error code, otherwise errors", "else: cmd = ['rev-list', '--count', 'HEAD'] def run_git(cmd): try: p", "Final fallback text = stream.decode('latin1') return text def update_git_devstr(version, path=None):", "back on the old method of getting all revisions and", "a string with the revision number (if `sha` is False),", "'0' elif not sha and returncode == 129: # git", "under. 
If given, this won't look any higher than ``levels``", "if git returns an error code, otherwise errors pass silently.", "characters needed per-commit (rather than the full hash) cmd =", "os.path.abspath(pathname) else: return None current_level = 0 while levels is", "is False), the SHA1 hash of the current commit (if", "and returncode == 128: # git returns 128 if the", "run_git(cmd): try: p = subprocess.Popen(['git'] + cmd, cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE,", "== 128: # git returns 128 if the command is", "stdin=subprocess.PIPE) stdout, stderr = p.communicate() except OSError as e: if", "is being imported directly from a git working copy. This", "a file or directory name, determine the root of the", "except UnicodeDecodeError: # Final fallback text = stream.decode('latin1') return text", "it is only ever executed within a subprocess when #", "Otherwise, the total count of commits in the repository will", "given path must be the root of the git repository", "'count: {0}'.format(_decode_stdio(stderr))) return (p.returncode, stdout, stderr) return p.returncode, stdout, stderr", "os.path.isdir(path): path = os.path.abspath(os.path.dirname(path)) if sha: # Faster for getting", "str or None If a string, specifies the directory to", "from a git working copy. This ensures that the revision", "devstr: # Probably not in git so just pass silently", "commits in the repository will be used as a \"revision", "code in # this module # BEGIN import locale import", "is only ever executed within a subprocess when # creating", "beginning of the code in # this module # BEGIN", "the current working directory is used, and must be the", "will be returned. Otherwise, the total count of commits in", "show_warning=False, path=path) except OSError: return version if not devstr: #", "working copy. This ensures that the revision number in the", "the following comment; it is used by # astropy_helpers.version_helpers to", "must be the root of the git repository. 
If given", "of revisions in this repository. Parameters ---------- sha : bool", "return (p.returncode, stdout, stderr) elif p.returncode != 0: if show_warning:", "comment; it is used by # astropy_helpers.version_helpers to determine the", "supported # Also use --abbrev-commit and --abbrev=0 to display the", "revision ' 'count: {0}'.format(_decode_stdio(stderr))) return (p.returncode, stdout, stderr) return p.returncode,", "- see LICENSE.rst \"\"\" Utilities for retrieving revision information from", "_decode_stdio(stdout)[:40] else: return _decode_stdio(stdout).strip() # This function is tested but", "levels=None): # pragma: no cover \"\"\" Given a file or", "repository. If given a filename it uses the directory containing", "show_warning: warnings.warn('Your git looks old (does it support {0}?); '", "if p.returncode == 129: if show_warning: warnings.warn('Your git looks old", "a git repo. \"\"\" if os.path.isfile(pathname): current_dir = os.path.abspath(os.path.dirname(pathname)) elif", "try: p = subprocess.Popen(['git'] + cmd, cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)", "full hash) cmd = ['rev-list', '--abbrev-commit', '--abbrev=0', 'HEAD'] returncode, stdout,", "path must be the root of the git repository and", "# update to the current git revision version_base = version.split('.dev',", "version_base = version.split('.dev', 1)[0] devstr = get_git_devstr(sha=False, show_warning=False, path=path) return", "bool If True, issue a warning if git returns an", "string with the revision number (if `sha` is False), the", "sha : bool If True, the full SHA1 hash will", "that file. Returns ------- devversion : str Either a string", "stdout, stderr) elif p.returncode != 0: if show_warning: warnings.warn('Git failed", "is used by # astropy_helpers.version_helpers to determine the beginning of", "repository present at {0!r}! 
Using ' 'default dev version.'.format(path)) return", "ValueError: stdio_encoding = 'utf-8' try: text = stream.decode(stdio_encoding) except UnicodeDecodeError:", "path=path) except OSError: return version if not devstr: # Probably", "pass silently. path : str or None If a string,", "to a git repo. \"\"\" if os.path.isfile(pathname): current_dir = os.path.abspath(os.path.dirname(pathname))", "for getting just the hash of HEAD cmd = ['rev-parse',", "= run_git(cmd) # Fall back on the old method of", "current_dir = os.path.abspath(pathname) else: return None current_level = 0 while", "path could not be determined to belong to a git", "locale import os import subprocess import warnings def _decode_stdio(stream): try:", "metrics. def _get_repo_path(pathname, levels=None): # pragma: no cover \"\"\" Given", "stderr = run_git(cmd) if not sha and returncode == 128:", "older than 1.7.2 # where the --count option is not", "return (p.returncode, b'', b'') if p.returncode == 129: if show_warning:", "show_warning=True, path=None): \"\"\" Determines the number of revisions in this", "hash) cmd = ['rev-list', '--abbrev-commit', '--abbrev=0', 'HEAD'] returncode, stdout, stderr", "\"\"\" if os.path.isfile(pathname): current_dir = os.path.abspath(os.path.dirname(pathname)) elif os.path.isdir(pathname): current_dir =", "the root of the git repository and is returned if", "if we're in git or not - returns '' if", "it doesn't get picked up by coverage metrics. 
def _get_repo_path(pathname,", "None current_level = 0 while levels is None or current_level", "to determine the beginning of the code in # this", "(if `sha` is True), or an empty string if git", "try: # Quick way to determine if we're in git", "or None If a string, specifies the directory to look", "counting # the lines if returncode == 0: return str(stdout.count(b'\\n'))", "= ['rev-parse', 'HEAD'] else: cmd = ['rev-list', '--count', 'HEAD'] def", "within a subprocess when # creating a fake package, so", "is None: path = os.getcwd() if not os.path.isdir(path): path =", "up by coverage metrics. def _get_repo_path(pathname, levels=None): # pragma: no", "working directory is used, and must be the root of", "------- devversion : str Either a string with the revision", "return text def update_git_devstr(version, path=None): \"\"\" Updates the git revision", "def run_git(cmd): try: p = subprocess.Popen(['git'] + cmd, cwd=path, stdout=subprocess.PIPE,", "empty string if git version info could not be identified.", "used as a \"revision number\". show_warning : bool If True,", "directory to look in to find the git repository. If", "number (if `sha` is False), the SHA1 hash of the", "sha: # Faster for getting just the hash of HEAD", "revisions and counting # the lines if returncode == 0:", "devstr else: # otherwise it's already the true/release version return", "stderr = p.communicate() except OSError as e: if show_warning: warnings.warn('Error", "this case, a warning is produced above but we #", "cmd = ['rev-parse', 'HEAD'] else: cmd = ['rev-list', '--count', 'HEAD']", "stderr returncode, stdout, stderr = run_git(cmd) if not sha and", "repository and is returned if so. Returns `None` if the", "will be used as a \"revision number\". 
show_warning : bool", "the true/release version return version def get_git_devstr(sha=False, show_warning=True, path=None): \"\"\"", "In this case, a warning is produced above but we", "`sha` is False), the SHA1 hash of the current commit", "return str(stdout.count(b'\\n')) else: return '' elif sha: return _decode_stdio(stdout)[:40] else:", "module # BEGIN import locale import os import subprocess import", "--count option is not supported # Also use --abbrev-commit and", "it support {0}?); ' 'consider upgrading to v1.7.2 or '", "``levels`` (that is, if ``levels=0`` then the given path must", "show_warning: warnings.warn('Error running git: ' + str(e)) return (None, b'',", ": bool If True, the full SHA1 hash will be", "project's git repository. \"\"\" # Do not remove the following", "to parse; in # particular this could happen in git", "= get_git_devstr(sha=False, show_warning=False, path=path) return version_base + '.dev' + devstr", "warnings.warn('Your git looks old (does it support {0}?); ' 'consider", "# Do not remove the following comment; it is used", "'' if not devstr = get_git_devstr(sha=True, show_warning=False, path=path) except OSError:", "the hash of HEAD cmd = ['rev-parse', 'HEAD'] else: cmd", "so it doesn't get picked up by coverage metrics. def", "= 'utf-8' try: text = stream.decode(stdio_encoding) except UnicodeDecodeError: # Final", "or 'utf-8' except ValueError: stdio_encoding = 'utf-8' try: text =", "current commit (if `sha` is True), or an empty string", "the default dev version of '0'. return '0' elif not", "be identified. \"\"\" if path is None: path = os.getcwd()", "path : str or None If a string, specifies the", "the version string is accurate. \"\"\" try: # Quick way", "use --abbrev-commit and --abbrev=0 to display the minimum # number", "' 'later.'.format(cmd[0])) return (p.returncode, stdout, stderr) elif p.returncode != 0:", "directory name, determine the root of the git repository this", "repository. 
Parameters ---------- sha : bool If True, the full", "is accurate. \"\"\" try: # Quick way to determine if", "== 129: # git returns 129 if a command option", "but it is only ever executed within a subprocess when", "a 3-clause BSD style license - see LICENSE.rst \"\"\" Utilities", "returned. Otherwise, the total count of commits in the repository", "cmd = ['rev-list', '--abbrev-commit', '--abbrev=0', 'HEAD'] returncode, stdout, stderr =", "number\". show_warning : bool If True, issue a warning if", "+ cmd, cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) stdout, stderr = p.communicate()", "return _decode_stdio(stdout).strip() # This function is tested but it is", "ever executed within a subprocess when # creating a fake", "try: text = stream.decode(stdio_encoding) except UnicodeDecodeError: # Final fallback text", "is returned if so. Returns `None` if the given path", "warning is produced above but we # return the default", "Quick way to determine if we're in git or not", "'utf-8' except ValueError: stdio_encoding = 'utf-8' try: text = stream.decode(stdio_encoding)", "returns '' if not devstr = get_git_devstr(sha=True, show_warning=False, path=path) except", "the git repository this path is under. If given, this", "doesn't get picked up by coverage metrics. def _get_repo_path(pathname, levels=None):", "['rev-list', '--count', 'HEAD'] def run_git(cmd): try: p = subprocess.Popen(['git'] +", "be used as a \"revision number\". show_warning : bool If", "or not - returns '' if not devstr = get_git_devstr(sha=True,", "path=None): \"\"\" Updates the git revision string if and only", "failed to parse; in # particular this could happen in", "' 'default dev version.'.format(path)) return (p.returncode, b'', b'') if p.returncode", "in the repository will be used as a \"revision number\".", "fake package, so it doesn't get picked up by coverage", "identified. 
\"\"\" if path is None: path = os.getcwd() if", "while levels is None or current_level <= levels: if os.path.exists(os.path.join(current_dir,", "_decode_stdio(stdout).strip() # This function is tested but it is only", "getting just the hash of HEAD cmd = ['rev-parse', 'HEAD']", "stdio_encoding = locale.getdefaultlocale()[1] or 'utf-8' except ValueError: stdio_encoding = 'utf-8'", "os.path.abspath(os.path.dirname(pathname)) elif os.path.isdir(pathname): current_dir = os.path.abspath(pathname) else: return None current_level", "dev version.'.format(path)) return (p.returncode, b'', b'') if p.returncode == 129:", "# Quick way to determine if we're in git or", "return version if 'dev' in version: # update to the", "from within a git # repository tree. In this case,", "specifies the directory to look in to find the git", "get_git_devstr(sha=False, show_warning=True, path=None): \"\"\" Determines the number of revisions in", "# this module # BEGIN import locale import os import", "current_level += 1 if current_dir == os.path.dirname(current_dir): break current_dir =", "a fake package, so it doesn't get picked up by", "# particular this could happen in git versions older than", "warnings.warn('Git failed while determining revision ' 'count: {0}'.format(_decode_stdio(stderr))) return (p.returncode,", "repository this path is under. If given, this won't look", "return (None, b'', b'') if p.returncode == 128: if show_warning:", "used by # astropy_helpers.version_helpers to determine the beginning of the", "if 'dev' in version: # update to the current git", "+ '.dev' + devstr else: # otherwise it's already the", "in git versions older than 1.7.2 # where the --count", "os.path.isfile(pathname): current_dir = os.path.abspath(os.path.dirname(pathname)) elif os.path.isdir(pathname): current_dir = os.path.abspath(pathname) else:", "string if git version info could not be identified. \"\"\"", "code, otherwise errors pass silently. path : str or None", "dev version of '0'. 
return '0' elif not sha and", "fallback text = stream.decode('latin1') return text def update_git_devstr(version, path=None): \"\"\"", "not os.path.isdir(path): path = os.path.abspath(os.path.dirname(path)) if sha: # Faster for", "it's already the true/release version return version def get_git_devstr(sha=False, show_warning=True,", "OSError: return version if not devstr: # Probably not in", "p.returncode, stdout, stderr returncode, stdout, stderr = run_git(cmd) if not", "subprocess import warnings def _decode_stdio(stream): try: stdio_encoding = locale.getdefaultlocale()[1] or", "string, specifies the directory to look in to find the", "== 0: return str(stdout.count(b'\\n')) else: return '' elif sha: return", "just the hash of HEAD cmd = ['rev-parse', 'HEAD'] else:", "= locale.getdefaultlocale()[1] or 'utf-8' except ValueError: stdio_encoding = 'utf-8' try:", "'HEAD'] returncode, stdout, stderr = run_git(cmd) # Fall back on", "hash of HEAD cmd = ['rev-parse', 'HEAD'] else: cmd =", "the SHA1 hash of the current commit (if `sha` is", "# where the --count option is not supported # Also", "if show_warning: warnings.warn('No git repository present at {0!r}! 
Using '", "version_base + '.dev' + devstr else: # otherwise it's already", "'consider upgrading to v1.7.2 or ' 'later.'.format(cmd[0])) return (p.returncode, stdout,", "(that is, if ``levels=0`` then the given path must be", "None: path = os.getcwd() if not os.path.isdir(path): path = os.path.abspath(os.path.dirname(path))", "stdout, stderr = run_git(cmd) if not sha and returncode ==", "count of commits in the repository will be used as", "locale.getdefaultlocale()[1] or 'utf-8' except ValueError: stdio_encoding = 'utf-8' try: text", "directory is used, and must be the root of the", "style license - see LICENSE.rst \"\"\" Utilities for retrieving revision", "os.getcwd() if not os.path.isdir(path): path = os.path.abspath(os.path.dirname(path)) if sha: #", "this won't look any higher than ``levels`` (that is, if", "old (does it support {0}?); ' 'consider upgrading to v1.7.2", "= get_git_devstr(sha=True, show_warning=False, path=path) except OSError: return version if not", "find the git repository. If `None`, the current working directory", "# Also use --abbrev-commit and --abbrev=0 to display the minimum", "the current commit (if `sha` is True), or an empty", "path is being imported directly from a git working copy.", "version if not devstr: # Probably not in git so", "silently. 
path : str or None If a string, specifies", "return current_dir current_level += 1 if current_dir == os.path.dirname(current_dir): break", "b'', b'') if p.returncode == 129: if show_warning: warnings.warn('Your git", "elif sha: return _decode_stdio(stdout)[:40] else: return _decode_stdio(stdout).strip() # This function", "current_level <= levels: if os.path.exists(os.path.join(current_dir, '.git')): return current_dir current_level +=", "import warnings def _decode_stdio(stream): try: stdio_encoding = locale.getdefaultlocale()[1] or 'utf-8'", "None or current_level <= levels: if os.path.exists(os.path.join(current_dir, '.git')): return current_dir", "import locale import os import subprocess import warnings def _decode_stdio(stream):", "is True), or an empty string if git version info", "stdio_encoding = 'utf-8' try: text = stream.decode(stdio_encoding) except UnicodeDecodeError: #", "commit (if `sha` is True), or an empty string if", "all revisions and counting # the lines if returncode ==", "in to find the git repository. If `None`, the current", "levels is None or current_level <= levels: if os.path.exists(os.path.join(current_dir, '.git')):", "string is accurate. \"\"\" try: # Quick way to determine", "support {0}?); ' 'consider upgrading to v1.7.2 or ' 'later.'.format(cmd[0]))", "def get_git_devstr(sha=False, show_warning=True, path=None): \"\"\" Determines the number of revisions", "if not sha and returncode == 128: # git returns", "# BEGIN import locale import os import subprocess import warnings", "the number of revisions in this repository. Parameters ---------- sha", "stdout, stderr) return p.returncode, stdout, stderr returncode, stdout, stderr =", "it uses the directory containing that file. 
Returns ------- devversion", "with the revision number (if `sha` is False), the SHA1", "lines if returncode == 0: return str(stdout.count(b'\\n')) else: return ''", "bool If True, the full SHA1 hash will be returned.", "os.path.abspath(os.path.dirname(path)) if sha: # Faster for getting just the hash", "retrieving revision information from a project's git repository. \"\"\" #", "determine if we're in git or not - returns ''", "stderr=subprocess.PIPE, stdin=subprocess.PIPE) stdout, stderr = p.communicate() except OSError as e:", "ensures that the revision number in the version string is", "astropy_helpers.version_helpers to determine the beginning of the code in #", "looks old (does it support {0}?); ' 'consider upgrading to", "git repository this path is under. If given, this won't", "return '' elif sha: return _decode_stdio(stdout)[:40] else: return _decode_stdio(stdout).strip() #", "git: ' + str(e)) return (None, b'', b'') if p.returncode", "above but we # return the default dev version of", "levels: if os.path.exists(os.path.join(current_dir, '.git')): return current_dir current_level += 1 if", "Returns `None` if the given path could not be determined", "a \"revision number\". show_warning : bool If True, issue a", "command is not run from within a git # repository", "and returncode == 129: # git returns 129 if a", "uses the directory containing that file. Returns ------- devversion :", "If given a filename it uses the directory containing that", "else: return '' elif sha: return _decode_stdio(stdout)[:40] else: return _decode_stdio(stdout).strip()", "the revision number (if `sha` is False), the SHA1 hash", "option is not supported # Also use --abbrev-commit and --abbrev=0", "`None`, the current working directory is used, and must be", "an error code, otherwise errors pass silently. path : str", ": str Either a string with the revision number (if", "stderr = run_git(cmd) # Fall back on the old method", "being imported directly from a git working copy. 
This ensures", "!= 0: if show_warning: warnings.warn('Git failed while determining revision '", "BSD style license - see LICENSE.rst \"\"\" Utilities for retrieving", "get_git_devstr(sha=False, show_warning=False, path=path) return version_base + '.dev' + devstr else:", "returncode, stdout, stderr = run_git(cmd) if not sha and returncode", "function is tested but it is only ever executed within", "stderr) elif p.returncode != 0: if show_warning: warnings.warn('Git failed while", "git # repository tree. In this case, a warning is", "than ``levels`` (that is, if ``levels=0`` then the given path", "returns an error code, otherwise errors pass silently. path :", "particular this could happen in git versions older than 1.7.2", "the full SHA1 hash will be returned. Otherwise, the total", "current_level = 0 while levels is None or current_level <=", "import subprocess import warnings def _decode_stdio(stream): try: stdio_encoding = locale.getdefaultlocale()[1]", "this repository. Parameters ---------- sha : bool If True, the", "= stream.decode(stdio_encoding) except UnicodeDecodeError: # Final fallback text = stream.decode('latin1')", "a string, specifies the directory to look in to find", "BEGIN import locale import os import subprocess import warnings def", "not in git so just pass silently return version if", "by # astropy_helpers.version_helpers to determine the beginning of the code", "else: return _decode_stdio(stdout).strip() # This function is tested but it", "`sha` is True), or an empty string if git version", "present at {0!r}! Using ' 'default dev version.'.format(path)) return (p.returncode,", "a git working copy. 
This ensures that the revision number", "= os.path.abspath(os.path.dirname(path)) if sha: # Faster for getting just the", "if show_warning: warnings.warn('Your git looks old (does it support {0}?);", "UnicodeDecodeError: # Final fallback text = stream.decode('latin1') return text def", "is not run from within a git # repository tree.", "from a project's git repository. \"\"\" # Do not remove", "that the revision number in the version string is accurate.", "Probably not in git so just pass silently return version", "True), or an empty string if git version info could", "show_warning: warnings.warn('Git failed while determining revision ' 'count: {0}'.format(_decode_stdio(stderr))) return", "or an empty string if git version info could not", "update_git_devstr(version, path=None): \"\"\" Updates the git revision string if and", "cover \"\"\" Given a file or directory name, determine the", "path is under. If given, this won't look any higher", "parse; in # particular this could happen in git versions", "def _get_repo_path(pathname, levels=None): # pragma: no cover \"\"\" Given a", "tested but it is only ever executed within a subprocess", "be determined to belong to a git repo. \"\"\" if", "return None current_level = 0 while levels is None or", "= version.split('.dev', 1)[0] devstr = get_git_devstr(sha=False, show_warning=False, path=path) return version_base", "not be determined to belong to a git repo. \"\"\"", "license - see LICENSE.rst \"\"\" Utilities for retrieving revision information", "issue a warning if git returns an error code, otherwise", "not run from within a git # repository tree. In", "Given a file or directory name, determine the root of", "if current_dir == os.path.dirname(current_dir): break current_dir = os.path.dirname(current_dir) return None", "info could not be identified. 
\"\"\" if path is None:", "in # this module # BEGIN import locale import os", "if p.returncode == 128: if show_warning: warnings.warn('No git repository present", "not remove the following comment; it is used by #", "produced above but we # return the default dev version", "True, issue a warning if git returns an error code,", "b'') if p.returncode == 129: if show_warning: warnings.warn('Your git looks", "'--abbrev=0', 'HEAD'] returncode, stdout, stderr = run_git(cmd) # Fall back", "minimum # number of characters needed per-commit (rather than the", "show_warning=False, path=path) return version_base + '.dev' + devstr else: #", "If a string, specifies the directory to look in to", "total count of commits in the repository will be used", "'default dev version.'.format(path)) return (p.returncode, b'', b'') if p.returncode ==", "in the version string is accurate. \"\"\" try: # Quick", "display the minimum # number of characters needed per-commit (rather", "path = os.path.abspath(os.path.dirname(path)) if sha: # Faster for getting just", "coverage metrics. def _get_repo_path(pathname, levels=None): # pragma: no cover \"\"\"", "not sha and returncode == 129: # git returns 129", "we're in git or not - returns '' if not", "128: if show_warning: warnings.warn('No git repository present at {0!r}! Using", "sha and returncode == 129: # git returns 129 if", "the root of the git repository this path is under.", "git revision version_base = version.split('.dev', 1)[0] devstr = get_git_devstr(sha=False, show_warning=False,", "if not devstr: # Probably not in git so just", "p.communicate() except OSError as e: if show_warning: warnings.warn('Error running git:", "'later.'.format(cmd[0])) return (p.returncode, stdout, stderr) elif p.returncode != 0: if", "version return version def get_git_devstr(sha=False, show_warning=True, path=None): \"\"\" Determines the", "= ['rev-list', '--abbrev-commit', '--abbrev=0', 'HEAD'] returncode, stdout, stderr = run_git(cmd)", "git repository. 
\"\"\" # Do not remove the following comment;", "run from within a git # repository tree. In this", "is under. If given, this won't look any higher than", "to determine if we're in git or not - returns", "# otherwise it's already the true/release version return version def", "won't look any higher than ``levels`` (that is, if ``levels=0``", "_get_repo_path(pathname, levels=None): # pragma: no cover \"\"\" Given a file", "silently return version if 'dev' in version: # update to", "hash of the current commit (if `sha` is True), or", "return _decode_stdio(stdout)[:40] else: return _decode_stdio(stdout).strip() # This function is tested", "error code, otherwise errors pass silently. path : str or", "`None` if the given path could not be determined to", "upgrading to v1.7.2 or ' 'later.'.format(cmd[0])) return (p.returncode, stdout, stderr)", "Updates the git revision string if and only if the", "0: return str(stdout.count(b'\\n')) else: return '' elif sha: return _decode_stdio(stdout)[:40]", "``levels=0`` then the given path must be the root of", "1.7.2 # where the --count option is not supported #", "a subprocess when # creating a fake package, so it", "executed within a subprocess when # creating a fake package,", "128 if the command is not run from within a", "the current git revision version_base = version.split('.dev', 1)[0] devstr =", "b'', b'') if p.returncode == 128: if show_warning: warnings.warn('No git", "devstr = get_git_devstr(sha=False, show_warning=False, path=path) return version_base + '.dev' +", "revision version_base = version.split('.dev', 1)[0] devstr = get_git_devstr(sha=False, show_warning=False, path=path)", "if sha: # Faster for getting just the hash of", "p.returncode == 129: if show_warning: warnings.warn('Your git looks old (does", "if path is None: path = os.getcwd() if not os.path.isdir(path):", "version info could not be identified. 
\"\"\" if path is", "if os.path.isfile(pathname): current_dir = os.path.abspath(os.path.dirname(pathname)) elif os.path.isdir(pathname): current_dir = os.path.abspath(pathname)", "revision string if and only if the path is being", "getting all revisions and counting # the lines if returncode", "tree. In this case, a warning is produced above but", "'0'. return '0' elif not sha and returncode == 129:", "otherwise errors pass silently. path : str or None If", "not devstr = get_git_devstr(sha=True, show_warning=False, path=path) except OSError: return version", "Determines the number of revisions in this repository. Parameters ----------", "os.path.exists(os.path.join(current_dir, '.git')): return current_dir current_level += 1 if current_dir ==", "['rev-list', '--abbrev-commit', '--abbrev=0', 'HEAD'] returncode, stdout, stderr = run_git(cmd) #", "while determining revision ' 'count: {0}'.format(_decode_stdio(stderr))) return (p.returncode, stdout, stderr)", "number in the version string is accurate. \"\"\" try: #", "p.returncode == 128: if show_warning: warnings.warn('No git repository present at", "returncode == 128: # git returns 128 if the command", "if the path is being imported directly from a git", "0: if show_warning: warnings.warn('Git failed while determining revision ' 'count:", "import os import subprocess import warnings def _decode_stdio(stream): try: stdio_encoding", "and is returned if so. Returns `None` if the given", "true/release version return version def get_git_devstr(sha=False, show_warning=True, path=None): \"\"\" Determines", "This ensures that the revision number in the version string", "If True, issue a warning if git returns an error", "git repository. If `None`, the current working directory is used,", "number of characters needed per-commit (rather than the full hash)", "'--abbrev-commit', '--abbrev=0', 'HEAD'] returncode, stdout, stderr = run_git(cmd) # Fall", "and must be the root of the git repository. 
If", "Do not remove the following comment; it is used by", "the --count option is not supported # Also use --abbrev-commit", "in # particular this could happen in git versions older", "show_warning : bool If True, issue a warning if git", "(p.returncode, b'', b'') if p.returncode == 129: if show_warning: warnings.warn('Your", "# Fall back on the old method of getting all", "the minimum # number of characters needed per-commit (rather than", "pragma: no cover \"\"\" Given a file or directory name,", "\"\"\" Determines the number of revisions in this repository. Parameters", "HEAD cmd = ['rev-parse', 'HEAD'] else: cmd = ['rev-list', '--count',", "current git revision version_base = version.split('.dev', 1)[0] devstr = get_git_devstr(sha=False,", "to v1.7.2 or ' 'later.'.format(cmd[0])) return (p.returncode, stdout, stderr) elif", "If `None`, the current working directory is used, and must", "= ['rev-list', '--count', 'HEAD'] def run_git(cmd): try: p = subprocess.Popen(['git']", "return version_base + '.dev' + devstr else: # otherwise it's", "or ' 'later.'.format(cmd[0])) return (p.returncode, stdout, stderr) elif p.returncode !=", "# git returns 129 if a command option failed to", "old method of getting all revisions and counting # the", "False), the SHA1 hash of the current commit (if `sha`", "OSError as e: if show_warning: warnings.warn('Error running git: ' +", "128: # git returns 128 if the command is not", "on the old method of getting all revisions and counting", "git returns an error code, otherwise errors pass silently. 
path", "version def get_git_devstr(sha=False, show_warning=True, path=None): \"\"\" Determines the number of", "---------- sha : bool If True, the full SHA1 hash", "than 1.7.2 # where the --count option is not supported", "string if and only if the path is being imported", "not - returns '' if not devstr = get_git_devstr(sha=True, show_warning=False,", "stderr) return p.returncode, stdout, stderr returncode, stdout, stderr = run_git(cmd)", "3-clause BSD style license - see LICENSE.rst \"\"\" Utilities for", "containing that file. Returns ------- devversion : str Either a", "\"\"\" Given a file or directory name, determine the root", "if a command option failed to parse; in # particular", "only if the path is being imported directly from a", "stream.decode(stdio_encoding) except UnicodeDecodeError: # Final fallback text = stream.decode('latin1') return", "# Final fallback text = stream.decode('latin1') return text def update_git_devstr(version,", "run_git(cmd) # Fall back on the old method of getting", "the total count of commits in the repository will be", "stdout, stderr = p.communicate() except OSError as e: if show_warning:", "returns 128 if the command is not run from within", "the git repository. If `None`, the current working directory is", "repository. 
\"\"\" # Do not remove the following comment; it", "This function is tested but it is only ever executed", "run_git(cmd) if not sha and returncode == 128: # git", "warnings def _decode_stdio(stream): try: stdio_encoding = locale.getdefaultlocale()[1] or 'utf-8' except", "devstr = get_git_devstr(sha=True, show_warning=False, path=path) except OSError: return version if", "(p.returncode, stdout, stderr) return p.returncode, stdout, stderr returncode, stdout, stderr", "revision number (if `sha` is False), the SHA1 hash of", "1 if current_dir == os.path.dirname(current_dir): break current_dir = os.path.dirname(current_dir) return", "returns 129 if a command option failed to parse; in", "get picked up by coverage metrics. def _get_repo_path(pathname, levels=None): #", "# Probably not in git so just pass silently return", "\"\"\" # Do not remove the following comment; it is", "# pragma: no cover \"\"\" Given a file or directory", "so. Returns `None` if the given path could not be", "versions older than 1.7.2 # where the --count option is", "of characters needed per-commit (rather than the full hash) cmd", "the revision number in the version string is accurate. 
\"\"\"", "= os.path.abspath(os.path.dirname(pathname)) elif os.path.isdir(pathname): current_dir = os.path.abspath(pathname) else: return None", "to the current git revision version_base = version.split('.dev', 1)[0] devstr", "remove the following comment; it is used by # astropy_helpers.version_helpers", "# the lines if returncode == 0: return str(stdout.count(b'\\n')) else:", "Also use --abbrev-commit and --abbrev=0 to display the minimum #", "'dev' in version: # update to the current git revision", "file or directory name, determine the root of the git", "the command is not run from within a git #", "['rev-parse', 'HEAD'] else: cmd = ['rev-list', '--count', 'HEAD'] def run_git(cmd):", "'' elif sha: return _decode_stdio(stdout)[:40] else: return _decode_stdio(stdout).strip() # This", "<= levels: if os.path.exists(os.path.join(current_dir, '.git')): return current_dir current_level += 1", "- returns '' if not devstr = get_git_devstr(sha=True, show_warning=False, path=path)", "this module # BEGIN import locale import os import subprocess", "e: if show_warning: warnings.warn('Error running git: ' + str(e)) return", "text def update_git_devstr(version, path=None): \"\"\" Updates the git revision string", "except OSError: return version if not devstr: # Probably not", "revision information from a project's git repository. \"\"\" # Do", "repo. \"\"\" if os.path.isfile(pathname): current_dir = os.path.abspath(os.path.dirname(pathname)) elif os.path.isdir(pathname): current_dir", "of the git repository and is returned if so. Returns", "the path is being imported directly from a git working", "If True, the full SHA1 hash will be returned. Otherwise,", "path is None: path = os.getcwd() if not os.path.isdir(path): path", "(None, b'', b'') if p.returncode == 128: if show_warning: warnings.warn('No", "Utilities for retrieving revision information from a project's git repository.", "for retrieving revision information from a project's git repository. 
\"\"\"", "information from a project's git repository. \"\"\" # Do not", "number of revisions in this repository. Parameters ---------- sha :", "stream.decode('latin1') return text def update_git_devstr(version, path=None): \"\"\" Updates the git", "if not devstr = get_git_devstr(sha=True, show_warning=False, path=path) except OSError: return", "if os.path.exists(os.path.join(current_dir, '.git')): return current_dir current_level += 1 if current_dir", "of the code in # this module # BEGIN import", ": str or None If a string, specifies the directory", "return the default dev version of '0'. return '0' elif", "Faster for getting just the hash of HEAD cmd =", "\"\"\" try: # Quick way to determine if we're in", "Using ' 'default dev version.'.format(path)) return (p.returncode, b'', b'') if", "of the current commit (if `sha` is True), or an", "the given path must be the root of the git", "and --abbrev=0 to display the minimum # number of characters", "a warning is produced above but we # return the", "= os.path.abspath(pathname) else: return None current_level = 0 while levels", "name, determine the root of the git repository this path", "no cover \"\"\" Given a file or directory name, determine", "used, and must be the root of the git repository.", "{0}?); ' 'consider upgrading to v1.7.2 or ' 'later.'.format(cmd[0])) return", ": bool If True, issue a warning if git returns", "path=path) return version_base + '.dev' + devstr else: # otherwise", "git looks old (does it support {0}?); ' 'consider upgrading", "except OSError as e: if show_warning: warnings.warn('Error running git: '", "so just pass silently return version if 'dev' in version:", "129: # git returns 129 if a command option failed", "directly from a git working copy. This ensures that the", "SHA1 hash of the current commit (if `sha` is True),", "'--count', 'HEAD'] def run_git(cmd): try: p = subprocess.Popen(['git'] + cmd,", "stdout, stderr = run_git(cmd) # Fall back on the old", "if so. 
Returns `None` if the given path could not", "to look in to find the git repository. If `None`,", "is None or current_level <= levels: if os.path.exists(os.path.join(current_dir, '.git')): return", "default dev version of '0'. return '0' elif not sha", "os import subprocess import warnings def _decode_stdio(stream): try: stdio_encoding =", "version: # update to the current git revision version_base =", "git working copy. This ensures that the revision number in", "If given, this won't look any higher than ``levels`` (that", "needed per-commit (rather than the full hash) cmd = ['rev-list',", "# number of characters needed per-commit (rather than the full", "1)[0] devstr = get_git_devstr(sha=False, show_warning=False, path=path) return version_base + '.dev'", "the git repository and is returned if so. Returns `None`", "revisions in this repository. Parameters ---------- sha : bool If", "str(stdout.count(b'\\n')) else: return '' elif sha: return _decode_stdio(stdout)[:40] else: return", "picked up by coverage metrics. def _get_repo_path(pathname, levels=None): # pragma:", "cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) stdout, stderr = p.communicate() except OSError", "option failed to parse; in # particular this could happen", "'.dev' + devstr else: # otherwise it's already the true/release", "in git so just pass silently return version if 'dev'", "version.split('.dev', 1)[0] devstr = get_git_devstr(sha=False, show_warning=False, path=path) return version_base +" ]
[ "control''' from sot_talos_balance.utils.run_test_utils import run_ft_calibration, run_test, runCommandClient try: # Python", "# noqa except NameError: pass run_test('appli_feet_admittance.py') run_ft_calibration('robot.ftc') input(\"Wait before running", "test\") print('Set saturation value') runCommandClient('robot.admBF_dqSaturation.sin.value = [0.0, 0.0, 0.01, 0.0,", "value') runCommandClient('robot.admBF_dqSaturation.sin.value = [0.0, 0.0, 0.01, 0.0, 0.0, 0.0]') input(\"Wait", "except NameError: pass run_test('appli_feet_admittance.py') run_ft_calibration('robot.ftc') input(\"Wait before running the test\")", "input = raw_input # noqa except NameError: pass run_test('appli_feet_admittance.py') run_ft_calibration('robot.ftc')", "raw_input # noqa except NameError: pass run_test('appli_feet_admittance.py') run_ft_calibration('robot.ftc') input(\"Wait before", "2 input = raw_input # noqa except NameError: pass run_test('appli_feet_admittance.py')", "= raw_input # noqa except NameError: pass run_test('appli_feet_admittance.py') run_ft_calibration('robot.ftc') input(\"Wait", "NameError: pass run_test('appli_feet_admittance.py') run_ft_calibration('robot.ftc') input(\"Wait before running the test\") print('Set", "0.0, 0.01, 0.0, 0.0, 0.0]') input(\"Wait before dumping the data\")", "feet admittance control''' from sot_talos_balance.utils.run_test_utils import run_ft_calibration, run_test, runCommandClient try:", "noqa except NameError: pass run_test('appli_feet_admittance.py') run_ft_calibration('robot.ftc') input(\"Wait before running the", "admittance control''' from sot_talos_balance.utils.run_test_utils import run_ft_calibration, run_test, runCommandClient try: #", "[0.0, 0.0, 0.01, 0.0, 0.0, 0.0]') input(\"Wait before dumping the", "from sot_talos_balance.utils.run_test_utils import run_ft_calibration, run_test, runCommandClient try: # Python 2", "run_ft_calibration, run_test, runCommandClient try: # Python 2 input = raw_input", "Python 2 input = raw_input # 
noqa except NameError: pass", "runCommandClient('robot.admBF_dqSaturation.sin.value = [0.0, 0.0, 0.01, 0.0, 0.0, 0.0]') input(\"Wait before", "pass run_test('appli_feet_admittance.py') run_ft_calibration('robot.ftc') input(\"Wait before running the test\") print('Set saturation", "<reponame>imaroger/sot-talos-balance '''Test feet admittance control''' from sot_talos_balance.utils.run_test_utils import run_ft_calibration, run_test,", "input(\"Wait before running the test\") print('Set saturation value') runCommandClient('robot.admBF_dqSaturation.sin.value =", "# Python 2 input = raw_input # noqa except NameError:", "run_test, runCommandClient try: # Python 2 input = raw_input #", "runCommandClient try: # Python 2 input = raw_input # noqa", "= [0.0, 0.0, 0.01, 0.0, 0.0, 0.0]') input(\"Wait before dumping", "import run_ft_calibration, run_test, runCommandClient try: # Python 2 input =", "print('Set saturation value') runCommandClient('robot.admBF_dqSaturation.sin.value = [0.0, 0.0, 0.01, 0.0, 0.0,", "running the test\") print('Set saturation value') runCommandClient('robot.admBF_dqSaturation.sin.value = [0.0, 0.0,", "sot_talos_balance.utils.run_test_utils import run_ft_calibration, run_test, runCommandClient try: # Python 2 input", "run_test('appli_feet_admittance.py') run_ft_calibration('robot.ftc') input(\"Wait before running the test\") print('Set saturation value')", "0.01, 0.0, 0.0, 0.0]') input(\"Wait before dumping the data\") runCommandClient('dump_tracer(robot.tracer)')", "'''Test feet admittance control''' from sot_talos_balance.utils.run_test_utils import run_ft_calibration, run_test, runCommandClient", "saturation value') runCommandClient('robot.admBF_dqSaturation.sin.value = [0.0, 0.0, 0.01, 0.0, 0.0, 0.0]')", "run_ft_calibration('robot.ftc') input(\"Wait before running the test\") print('Set saturation value') runCommandClient('robot.admBF_dqSaturation.sin.value", "the test\") print('Set saturation value') 
runCommandClient('robot.admBF_dqSaturation.sin.value = [0.0, 0.0, 0.01,", "try: # Python 2 input = raw_input # noqa except", "before running the test\") print('Set saturation value') runCommandClient('robot.admBF_dqSaturation.sin.value = [0.0," ]
[ "db.get(b'dave') assert(b'one' == result) db.set(b'dave',b'two') result = db.get(b'dave') assert(b'two' ==", "result) assert(None == db.get(b'doesntexist')) assert(db.exists(b'dave')) db.delete(b'dave') assert(db.exists(b'dave') == False) if", "assert(b'two' == result) assert(None == db.get(b'doesntexist')) assert(db.exists(b'dave')) db.delete(b'dave') assert(db.exists(b'dave') ==", "db.get(b'dave') assert(b'two' == result) assert(None == db.get(b'doesntexist')) assert(db.exists(b'dave')) db.delete(b'dave') assert(db.exists(b'dave')", "home_dir def test_database(): dbfile = home_dir('temp', 'test.db') db = VanillaDB(dbfile)", "assert(None == db.get(b'doesntexist')) assert(db.exists(b'dave')) db.delete(b'dave') assert(db.exists(b'dave') == False) if os.path.exists(dbfile):", "== result) db.set(b'dave',b'two') result = db.get(b'dave') assert(b'two' == result) assert(None", "== db.get(b'doesntexist')) assert(db.exists(b'dave')) db.delete(b'dave') assert(db.exists(b'dave') == False) if os.path.exists(dbfile): os.remove(dbfile)", "result = db.get(b'dave') assert(b'one' == result) db.set(b'dave',b'two') result = db.get(b'dave')", "from tendermint.utils import home_dir def test_database(): dbfile = home_dir('temp', 'test.db')", "VanillaDB(dbfile) db.set(b'dave',b'one') result = db.get(b'dave') assert(b'one' == result) db.set(b'dave',b'two') result", "== result) assert(None == db.get(b'doesntexist')) assert(db.exists(b'dave')) db.delete(b'dave') assert(db.exists(b'dave') == False)", "assert(b'one' == result) db.set(b'dave',b'two') result = db.get(b'dave') assert(b'two' == result)", "= VanillaDB(dbfile) db.set(b'dave',b'one') result = db.get(b'dave') assert(b'one' == result) db.set(b'dave',b'two')", "tendermint.utils import home_dir def test_database(): dbfile = home_dir('temp', 'test.db') db", "result = db.get(b'dave') assert(b'two' == result) assert(None == db.get(b'doesntexist')) assert(db.exists(b'dave'))", "import home_dir def test_database(): dbfile = 
home_dir('temp', 'test.db') db =", "import os from tendermint.db import VanillaDB from tendermint.utils import home_dir", "from tendermint.db import VanillaDB from tendermint.utils import home_dir def test_database():", "db = VanillaDB(dbfile) db.set(b'dave',b'one') result = db.get(b'dave') assert(b'one' == result)", "home_dir('temp', 'test.db') db = VanillaDB(dbfile) db.set(b'dave',b'one') result = db.get(b'dave') assert(b'one'", "def test_database(): dbfile = home_dir('temp', 'test.db') db = VanillaDB(dbfile) db.set(b'dave',b'one')", "db.set(b'dave',b'one') result = db.get(b'dave') assert(b'one' == result) db.set(b'dave',b'two') result =", "'test.db') db = VanillaDB(dbfile) db.set(b'dave',b'one') result = db.get(b'dave') assert(b'one' ==", "db.set(b'dave',b'two') result = db.get(b'dave') assert(b'two' == result) assert(None == db.get(b'doesntexist'))", "VanillaDB from tendermint.utils import home_dir def test_database(): dbfile = home_dir('temp',", "= home_dir('temp', 'test.db') db = VanillaDB(dbfile) db.set(b'dave',b'one') result = db.get(b'dave')", "import VanillaDB from tendermint.utils import home_dir def test_database(): dbfile =", "os from tendermint.db import VanillaDB from tendermint.utils import home_dir def", "= db.get(b'dave') assert(b'two' == result) assert(None == db.get(b'doesntexist')) assert(db.exists(b'dave')) db.delete(b'dave')", "dbfile = home_dir('temp', 'test.db') db = VanillaDB(dbfile) db.set(b'dave',b'one') result =", "<reponame>davebryson/py-tendermint import os from tendermint.db import VanillaDB from tendermint.utils import", "= db.get(b'dave') assert(b'one' == result) db.set(b'dave',b'two') result = db.get(b'dave') assert(b'two'", "tendermint.db import VanillaDB from tendermint.utils import home_dir def test_database(): dbfile", "result) db.set(b'dave',b'two') result = db.get(b'dave') assert(b'two' == result) assert(None ==", "test_database(): dbfile = home_dir('temp', 'test.db') db = VanillaDB(dbfile) db.set(b'dave',b'one') result" ]
[ "password are similar response = c.post('/accounts/register/', { 'username': 'asdasdasd0', 'password1':", "'username': 'asdasdasd1', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertEquals(response.status_code, 200) #", "self.assertEquals(response.status_code, 200) # no password response = c.post('/accounts/register/', { 'username':", "response = c.post('/accounts/register/', { 'username': '', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>'", "200) # username is empty response = c.post('/accounts/register/', { 'username':", "'password2': '<PASSWORD>' }) self.assertEquals(response.status_code, 200) # no password response =", "passwords don't match response = c.post('/accounts/register/', { 'username': 'asdasdasd1', 'password1':", "password response = c.post('/accounts/register/', { 'username': 'asdasdasd', 'password1': '', 'password2':", "response = c.post('/accounts/register/', { 'username': 'asdasdasd', 'password1': '', 'password2': ''", "on success redirects to / response = c.post('/accounts/register/', { 'username':", "200) # username and password are similar response = c.post('/accounts/register/',", "test_register(self): c = Client() # on success redirects to /", "{ 'username': 'asdas', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertRedirects(response, '/')", "response = c.post('/accounts/register/', { 'username': 'asdasdasd1', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>'", "from django.test import TestCase from django.test import Client class RegisterTestCase(TestCase):", "from django.test import Client class RegisterTestCase(TestCase): def test_register(self): c =", "import TestCase from django.test import Client class RegisterTestCase(TestCase): def test_register(self):", "}) self.assertRedirects(response, '/') # passwords don't match response = c.post('/accounts/register/',", "'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertEquals(response.status_code, 200) # no password", "= 
c.post('/accounts/register/', { 'username': '', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' })", "= c.post('/accounts/register/', { 'username': 'asdasdasd1', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' })", "'', 'password2': '' }) self.assertEquals(response.status_code, 200) # username and password", "Client() # on success redirects to / response = c.post('/accounts/register/',", "'password2': '<PASSWORD>' }) self.assertRedirects(response, '/') # passwords don't match response", "'username': '', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertEquals(response.status_code, 200) #", "c.post('/accounts/register/', { 'username': 'asdas', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertRedirects(response,", "{ 'username': 'asdasdasd1', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertEquals(response.status_code, 200)", "empty response = c.post('/accounts/register/', { 'username': '', 'password1': '<PASSWORD>', 'password2':", "'password1': '', 'password2': '' }) self.assertEquals(response.status_code, 200) # username and", "'<PASSWORD>' }) self.assertRedirects(response, '/') # passwords don't match response =", "# username is empty response = c.post('/accounts/register/', { 'username': '',", "= Client() # on success redirects to / response =", "'asdasdasd', 'password1': '', 'password2': '' }) self.assertEquals(response.status_code, 200) # username", "similar response = c.post('/accounts/register/', { 'username': 'asdasdasd0', 'password1': '<PASSWORD>', 'password2':", "is empty response = c.post('/accounts/register/', { 'username': '', 'password1': '<PASSWORD>',", "class RegisterTestCase(TestCase): def test_register(self): c = Client() # on success", "redirects to / response = c.post('/accounts/register/', { 'username': 'asdas', 'password1':", "= c.post('/accounts/register/', { 'username': 'asdasdasd0', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' })", "are similar response = 
c.post('/accounts/register/', { 'username': 'asdasdasd0', 'password1': '<PASSWORD>',", "Client class RegisterTestCase(TestCase): def test_register(self): c = Client() # on", "c.post('/accounts/register/', { 'username': 'asdasdasd1', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertEquals(response.status_code,", "django.test import Client class RegisterTestCase(TestCase): def test_register(self): c = Client()", "}) self.assertEquals(response.status_code, 200) # username is empty response = c.post('/accounts/register/',", "no password response = c.post('/accounts/register/', { 'username': 'asdasdasd', 'password1': '',", "response = c.post('/accounts/register/', { 'username': 'asdasdasd0', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>'", "{ 'username': 'asdasdasd', 'password1': '', 'password2': '' }) self.assertEquals(response.status_code, 200)", "'<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertRedirects(response, '/') # passwords don't match", "'' }) self.assertEquals(response.status_code, 200) # username and password are similar", "'<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertEquals(response.status_code, 200) # no password response", "don't match response = c.post('/accounts/register/', { 'username': 'asdasdasd1', 'password1': '<PASSWORD>',", "TestCase from django.test import Client class RegisterTestCase(TestCase): def test_register(self): c", "username and password are similar response = c.post('/accounts/register/', { 'username':", "'<PASSWORD>' }) self.assertEquals(response.status_code, 200) # no password response = c.post('/accounts/register/',", "'<PASSWORD>' }) self.assertEquals(response.status_code, 200) # username is empty response =", "= c.post('/accounts/register/', { 'username': 'asdas', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' })", "# on success redirects to / response = c.post('/accounts/register/', {", "c.post('/accounts/register/', { 'username': 'asdasdasd', 'password1': '', 'password2': '' }) 
self.assertEquals(response.status_code,", "}) self.assertEquals(response.status_code, 200) # username and password are similar response", "'password2': '' }) self.assertEquals(response.status_code, 200) # username and password are", "}) self.assertEquals(response.status_code, 200) # no password response = c.post('/accounts/register/', {", "def test_register(self): c = Client() # on success redirects to", "c.post('/accounts/register/', { 'username': '', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertEquals(response.status_code,", "match response = c.post('/accounts/register/', { 'username': 'asdasdasd1', 'password1': '<PASSWORD>', 'password2':", "'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertEquals(response.status_code, 200) # username is", "import Client class RegisterTestCase(TestCase): def test_register(self): c = Client() #", "'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertRedirects(response, '/') # passwords don't", "200) # no password response = c.post('/accounts/register/', { 'username': 'asdasdasd',", "and password are similar response = c.post('/accounts/register/', { 'username': 'asdasdasd0',", "# passwords don't match response = c.post('/accounts/register/', { 'username': 'asdasdasd1',", "success redirects to / response = c.post('/accounts/register/', { 'username': 'asdas',", "self.assertEquals(response.status_code, 200) # username and password are similar response =", "c = Client() # on success redirects to / response", "response = c.post('/accounts/register/', { 'username': 'asdas', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>'", "# username and password are similar response = c.post('/accounts/register/', {", "c.post('/accounts/register/', { 'username': 'asdasdasd0', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertEquals(response.status_code,", "{ 'username': '', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertEquals(response.status_code, 200)", "{ 
'username': 'asdasdasd0', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertEquals(response.status_code, 200)", "'asdasdasd1', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertEquals(response.status_code, 200) # username", "'username': 'asdas', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertRedirects(response, '/') #", "'username': 'asdasdasd', 'password1': '', 'password2': '' }) self.assertEquals(response.status_code, 200) #", "'asdas', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertRedirects(response, '/') # passwords", "'<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertEquals(response.status_code, 200) # username is empty", "self.assertRedirects(response, '/') # passwords don't match response = c.post('/accounts/register/', {", "/ response = c.post('/accounts/register/', { 'username': 'asdas', 'password1': '<PASSWORD>', 'password2':", "'/') # passwords don't match response = c.post('/accounts/register/', { 'username':", "# no password response = c.post('/accounts/register/', { 'username': 'asdasdasd', 'password1':", "django.test import TestCase from django.test import Client class RegisterTestCase(TestCase): def", "= c.post('/accounts/register/', { 'username': 'asdasdasd', 'password1': '', 'password2': '' })", "RegisterTestCase(TestCase): def test_register(self): c = Client() # on success redirects", "username is empty response = c.post('/accounts/register/', { 'username': '', 'password1':", "'', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>' }) self.assertEquals(response.status_code, 200) # no", "'password2': '<PASSWORD>' }) self.assertEquals(response.status_code, 200) # username is empty response", "self.assertEquals(response.status_code, 200) # username is empty response = c.post('/accounts/register/', {", "to / response = c.post('/accounts/register/', { 'username': 'asdas', 'password1': '<PASSWORD>'," ]
[ "copy import math from typing import Optional, List import torch", "prior_prob) self._reset_parameters() def _reset_parameters(self): # init all parameters. for p", "torch import nn, Tensor import torch.nn.functional as F from detectron2.modeling.poolers", "- pred_ltrb[:,0,:,:] # x1 pred_boxes[:,1,:,:] = locations[:,1,:,:] - pred_ltrb[:,1,:,:] #", "_get_activation_fn(activation) self.feat1 = nn.Conv2d(self.d_model, self.d_model, kernel_size=3, stride=1, padding=1) self.cls_score =", "= shift_y.reshape(-1) locations = torch.stack((shift_x, shift_y), dim=1) + stride //", "kernel_size=3, stride=1, padding=1) self.ltrb_pred = nn.Conv2d(d_model, 4, kernel_size=3, stride=1, padding=1)", "features.device shifts_x = torch.arange( 0, w * stride, step=stride, dtype=torch.float32,", "2).permute(2, 0, 1) return locations def _get_activation_fn(activation): \"\"\"Return an activation", "1: nn.init.xavier_uniform_(p) # initialize the bias for focal loss. if", "h * stride, step=stride, dtype=torch.float32, device=device ) shift_y, shift_x =", "dim=1) + stride // 2 locations = locations.reshape(h, w, 2).permute(2,", "all parameters. for p in self.parameters(): if p.dim() > 1:", "activation == \"glu\": return F.glu raise RuntimeError(F\"activation should be relu/gelu,", "* positional encodings are passed in MHattention * extra LN", "nn.init.xavier_uniform_(p) # initialize the bias for focal loss. 
if p.shape[-1]", "torch.nn.functional as F from detectron2.modeling.poolers import ROIPooler, cat from detectron2.structures", "0, 1) return locations def _get_activation_fn(activation): \"\"\"Return an activation function", "torch.meshgrid(shifts_y, shifts_x) shift_x = shift_x.reshape(-1) shift_y = shift_y.reshape(-1) locations =", "= num_classes self.d_model = d_model self.num_classes = num_classes self.activation =", "self.activation = _get_activation_fn(activation) self.feat1 = nn.Conv2d(self.d_model, self.d_model, kernel_size=3, stride=1, padding=1)", "w, 2).permute(2, 0, 1) return locations def _get_activation_fn(activation): \"\"\"Return an", "= CenternetDeconv(cfg, backbone_shape) self.num_classes = num_classes self.d_model = d_model self.num_classes", "locations: (2, H, W) \"\"\" h, w = features.size()[-2:] device", "an activation function given a string\"\"\" if activation == \"relu\":", "layers \"\"\" import copy import math from typing import Optional,", "+ pred_ltrb[:,3,:,:] # y2 return pred_boxes @torch.no_grad() def locations(self, features,", "from all decoding layers \"\"\" import copy import math from", "self.bias_value = -math.log((1 - prior_prob) / prior_prob) self._reset_parameters() def _reset_parameters(self):", "CenternetDeconv class Head(nn.Module): def __init__(self, cfg, backbone_shape=[2048, 1024, 512, 256]):", "is removed * decoder returns a stack of activations from", "pred_ltrb = F.relu(self.ltrb_pred(feat)) pred_bboxes = self.apply_ltrb(locations, pred_ltrb) return class_logits, pred_bboxes", "* decoder returns a stack of activations from all decoding", "features = self.deconv(features_list) locations = self.locations(features)[None] feat = self.activation(self.feat1(features)) class_logits", "<NAME> # Contact: <EMAIL> # # Copyright (c) Facebook, Inc.", "self.feat1 = nn.Conv2d(self.d_model, self.d_model, kernel_size=3, stride=1, padding=1) self.cls_score = nn.Conv2d(d_model,", "in MHattention * extra LN at the end of encoder", 
"super().__init__() # Build heads. num_classes = cfg.MODEL.OneNet.NUM_CLASSES d_model = cfg.MODEL.OneNet.DECONV_CHANNEL[-1]", "== \"glu\": return F.glu raise RuntimeError(F\"activation should be relu/gelu, not", "y1 pred_boxes[:,2,:,:] = locations[:,0,:,:] + pred_ltrb[:,2,:,:] # x2 pred_boxes[:,3,:,:] =", "pred_bboxes = self.apply_ltrb(locations, pred_ltrb) return class_logits, pred_bboxes def apply_ltrb(self, locations,", "== \"gelu\": return F.gelu if activation == \"glu\": return F.glu", "# Init parameters. prior_prob = cfg.MODEL.OneNet.PRIOR_PROB self.bias_value = -math.log((1 -", "activation == \"relu\": return F.relu if activation == \"gelu\": return", "\"\"\" OneNet Transformer class. Copy-paste from torch.nn.Transformer with modifications: *", "return locations def _get_activation_fn(activation): \"\"\"Return an activation function given a", "dtype=torch.float32, device=device ) shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x) shift_x =", "encoder is removed * decoder returns a stack of activations", "from torch.nn.Transformer with modifications: * positional encodings are passed in", "(c) Facebook, Inc. and its affiliates. All Rights Reserved \"\"\"", "\"\"\" pred_boxes = torch.zeros_like(pred_ltrb) pred_boxes[:,0,:,:] = locations[:,0,:,:] - pred_ltrb[:,0,:,:] #", "shift_y = shift_y.reshape(-1) locations = torch.stack((shift_x, shift_y), dim=1) + stride", "a stack of activations from all decoding layers \"\"\" import", "given a string\"\"\" if activation == \"relu\": return F.relu if", "torch.zeros_like(pred_ltrb) pred_boxes[:,0,:,:] = locations[:,0,:,:] - pred_ltrb[:,0,:,:] # x1 pred_boxes[:,1,:,:] =", "import nn, Tensor import torch.nn.functional as F from detectron2.modeling.poolers import", "Build heads. 
num_classes = cfg.MODEL.OneNet.NUM_CLASSES d_model = cfg.MODEL.OneNet.DECONV_CHANNEL[-1] activation =", "self.d_model = d_model self.num_classes = num_classes self.activation = _get_activation_fn(activation) self.feat1", "stride=1, padding=1) self.ltrb_pred = nn.Conv2d(d_model, 4, kernel_size=3, stride=1, padding=1) #", "self.activation(self.feat1(features)) class_logits = self.cls_score(feat) pred_ltrb = F.relu(self.ltrb_pred(feat)) pred_bboxes = self.apply_ltrb(locations,", "(1, 2, H, W) :param pred_ltrb: (N, 4, H, W)", "def apply_ltrb(self, locations, pred_ltrb): \"\"\" :param locations: (1, 2, H,", "import Boxes from .deconv import CenternetDeconv class Head(nn.Module): def __init__(self,", "= _get_activation_fn(activation) self.feat1 = nn.Conv2d(self.d_model, self.d_model, kernel_size=3, stride=1, padding=1) self.cls_score", "nn, Tensor import torch.nn.functional as F from detectron2.modeling.poolers import ROIPooler,", "at the end of encoder is removed * decoder returns", "+ pred_ltrb[:,2,:,:] # x2 pred_boxes[:,3,:,:] = locations[:,1,:,:] + pred_ltrb[:,3,:,:] #", "parameters. 
for p in self.parameters(): if p.dim() > 1: nn.init.xavier_uniform_(p)", "the end of encoder is removed * decoder returns a", "def _get_activation_fn(activation): \"\"\"Return an activation function given a string\"\"\" if", "= self.cls_score(feat) pred_ltrb = F.relu(self.ltrb_pred(feat)) pred_bboxes = self.apply_ltrb(locations, pred_ltrb) return", "MHattention * extra LN at the end of encoder is", "cfg.MODEL.OneNet.DECONV_CHANNEL[-1] activation = cfg.MODEL.OneNet.ACTIVATION self.deconv = CenternetDeconv(cfg, backbone_shape) self.num_classes =", "y2 return pred_boxes @torch.no_grad() def locations(self, features, stride=4): \"\"\" Arguments:", "shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x) shift_x = shift_x.reshape(-1) shift_y =", "H, W) \"\"\" h, w = features.size()[-2:] device = features.device", "nn.Conv2d(d_model, num_classes, kernel_size=3, stride=1, padding=1) self.ltrb_pred = nn.Conv2d(d_model, 4, kernel_size=3,", "cat from detectron2.structures import Boxes from .deconv import CenternetDeconv class", "self.num_classes: nn.init.constant_(p, self.bias_value) def forward(self, features_list): features = self.deconv(features_list) locations", "forward(self, features_list): features = self.deconv(features_list) locations = self.locations(features)[None] feat =", "// 2 locations = locations.reshape(h, w, 2).permute(2, 0, 1) return", "F from detectron2.modeling.poolers import ROIPooler, cat from detectron2.structures import Boxes", "= cfg.MODEL.OneNet.ACTIVATION self.deconv = CenternetDeconv(cfg, backbone_shape) self.num_classes = num_classes self.d_model", "torch.arange( 0, w * stride, step=stride, dtype=torch.float32, device=device ) shifts_y", "2, H, W) :param pred_ltrb: (N, 4, H, W) \"\"\"", "decoding layers \"\"\" import copy import math from typing import", "removed * decoder returns a stack of activations from all", "stride=1, padding=1) # Init parameters. 
prior_prob = cfg.MODEL.OneNet.PRIOR_PROB self.bias_value =", "Return: locations: (2, H, W) \"\"\" h, w = features.size()[-2:]", "Copy-paste from torch.nn.Transformer with modifications: * positional encodings are passed", "focal loss. if p.shape[-1] == self.num_classes: nn.init.constant_(p, self.bias_value) def forward(self,", "cfg.MODEL.OneNet.NUM_CLASSES d_model = cfg.MODEL.OneNet.DECONV_CHANNEL[-1] activation = cfg.MODEL.OneNet.ACTIVATION self.deconv = CenternetDeconv(cfg,", "p in self.parameters(): if p.dim() > 1: nn.init.xavier_uniform_(p) # initialize", "import copy import math from typing import Optional, List import", "torch.nn.Transformer with modifications: * positional encodings are passed in MHattention", "pred_boxes[:,3,:,:] = locations[:,1,:,:] + pred_ltrb[:,3,:,:] # y2 return pred_boxes @torch.no_grad()", "= torch.arange( 0, w * stride, step=stride, dtype=torch.float32, device=device )", "= torch.stack((shift_x, shift_y), dim=1) + stride // 2 locations =", "_get_activation_fn(activation): \"\"\"Return an activation function given a string\"\"\" if activation", "pred_bboxes def apply_ltrb(self, locations, pred_ltrb): \"\"\" :param locations: (1, 2,", "the bias for focal loss. if p.shape[-1] == self.num_classes: nn.init.constant_(p,", "self.num_classes = num_classes self.d_model = d_model self.num_classes = num_classes self.activation", "self.num_classes = num_classes self.activation = _get_activation_fn(activation) self.feat1 = nn.Conv2d(self.d_model, self.d_model,", "self.bias_value) def forward(self, features_list): features = self.deconv(features_list) locations = self.locations(features)[None]", "num_classes, kernel_size=3, stride=1, padding=1) self.ltrb_pred = nn.Conv2d(d_model, 4, kernel_size=3, stride=1,", "loss. 
if p.shape[-1] == self.num_classes: nn.init.constant_(p, self.bias_value) def forward(self, features_list):", "H, W) Return: locations: (2, H, W) \"\"\" h, w", "if activation == \"glu\": return F.glu raise RuntimeError(F\"activation should be", "self.apply_ltrb(locations, pred_ltrb) return class_logits, pred_bboxes def apply_ltrb(self, locations, pred_ltrb): \"\"\"", "device=device ) shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x) shift_x = shift_x.reshape(-1)", "class_logits, pred_bboxes def apply_ltrb(self, locations, pred_ltrb): \"\"\" :param locations: (1,", "from detectron2.modeling.poolers import ROIPooler, cat from detectron2.structures import Boxes from", "features_list): features = self.deconv(features_list) locations = self.locations(features)[None] feat = self.activation(self.feat1(features))", "shifts_x) shift_x = shift_x.reshape(-1) shift_y = shift_y.reshape(-1) locations = torch.stack((shift_x,", "function given a string\"\"\" if activation == \"relu\": return F.relu", "W) \"\"\" pred_boxes = torch.zeros_like(pred_ltrb) pred_boxes[:,0,:,:] = locations[:,0,:,:] - pred_ltrb[:,0,:,:]", "512, 256]): super().__init__() # Build heads. num_classes = cfg.MODEL.OneNet.NUM_CLASSES d_model", "import torch from torch import nn, Tensor import torch.nn.functional as", "import math from typing import Optional, List import torch from", "pred_boxes[:,2,:,:] = locations[:,0,:,:] + pred_ltrb[:,2,:,:] # x2 pred_boxes[:,3,:,:] = locations[:,1,:,:]", "locations[:,1,:,:] - pred_ltrb[:,1,:,:] # y1 pred_boxes[:,2,:,:] = locations[:,0,:,:] + pred_ltrb[:,2,:,:]", "= locations[:,0,:,:] + pred_ltrb[:,2,:,:] # x2 pred_boxes[:,3,:,:] = locations[:,1,:,:] +", "shift_y.reshape(-1) locations = torch.stack((shift_x, shift_y), dim=1) + stride // 2", "Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved", "shift_x = shift_x.reshape(-1) shift_y = shift_y.reshape(-1) locations = torch.stack((shift_x, shift_y),", "feat = self.activation(self.feat1(features)) class_logits = self.cls_score(feat) pred_ltrb = F.relu(self.ltrb_pred(feat)) pred_bboxes", "nn.Conv2d(d_model, 4, kernel_size=3, stride=1, padding=1) # Init parameters. prior_prob =", "1024, 512, 256]): super().__init__() # Build heads. num_classes = cfg.MODEL.OneNet.NUM_CLASSES", "padding=1) # Init parameters. prior_prob = cfg.MODEL.OneNet.PRIOR_PROB self.bias_value = -math.log((1", "stride=4): \"\"\" Arguments: features: (N, C, H, W) Return: locations:", "CenternetDeconv(cfg, backbone_shape) self.num_classes = num_classes self.d_model = d_model self.num_classes =", "prior_prob) / prior_prob) self._reset_parameters() def _reset_parameters(self): # init all parameters.", "= features.size()[-2:] device = features.device shifts_x = torch.arange( 0, w", "# initialize the bias for focal loss. if p.shape[-1] ==", "padding=1) self.ltrb_pred = nn.Conv2d(d_model, 4, kernel_size=3, stride=1, padding=1) # Init", "def locations(self, features, stride=4): \"\"\" Arguments: features: (N, C, H,", "decoder returns a stack of activations from all decoding layers", "# Contact: <EMAIL> # # Copyright (c) Facebook, Inc. and", "= cfg.MODEL.OneNet.PRIOR_PROB self.bias_value = -math.log((1 - prior_prob) / prior_prob) self._reset_parameters()", "0, h * stride, step=stride, dtype=torch.float32, device=device ) shift_y, shift_x", "locations.reshape(h, w, 2).permute(2, 0, 1) return locations def _get_activation_fn(activation): \"\"\"Return", "locations[:,1,:,:] + pred_ltrb[:,3,:,:] # y2 return pred_boxes @torch.no_grad() def locations(self,", "= features.device shifts_x = torch.arange( 0, w * stride, step=stride,", "as F from detectron2.modeling.poolers import ROIPooler, cat from detectron2.structures import", "/ prior_prob) self._reset_parameters() def _reset_parameters(self): # init all parameters. 
for", "self.parameters(): if p.dim() > 1: nn.init.xavier_uniform_(p) # initialize the bias", "import ROIPooler, cat from detectron2.structures import Boxes from .deconv import", "def _reset_parameters(self): # init all parameters. for p in self.parameters():", "import torch.nn.functional as F from detectron2.modeling.poolers import ROIPooler, cat from", "modifications: * positional encodings are passed in MHattention * extra", "1) return locations def _get_activation_fn(activation): \"\"\"Return an activation function given", "if p.dim() > 1: nn.init.xavier_uniform_(p) # initialize the bias for", "torch.arange( 0, h * stride, step=stride, dtype=torch.float32, device=device ) shift_y,", "of encoder is removed * decoder returns a stack of", "List import torch from torch import nn, Tensor import torch.nn.functional", "LN at the end of encoder is removed * decoder", "parameters. prior_prob = cfg.MODEL.OneNet.PRIOR_PROB self.bias_value = -math.log((1 - prior_prob) /", "shift_y), dim=1) + stride // 2 locations = locations.reshape(h, w,", "# y2 return pred_boxes @torch.no_grad() def locations(self, features, stride=4): \"\"\"", "string\"\"\" if activation == \"relu\": return F.relu if activation ==", "apply_ltrb(self, locations, pred_ltrb): \"\"\" :param locations: (1, 2, H, W)", "for p in self.parameters(): if p.dim() > 1: nn.init.xavier_uniform_(p) #", "if p.shape[-1] == self.num_classes: nn.init.constant_(p, self.bias_value) def forward(self, features_list): features", "class Head(nn.Module): def __init__(self, cfg, backbone_shape=[2048, 1024, 512, 256]): super().__init__()", "pred_boxes[:,0,:,:] = locations[:,0,:,:] - pred_ltrb[:,0,:,:] # x1 pred_boxes[:,1,:,:] = locations[:,1,:,:]", "return F.gelu if activation == \"glu\": return F.glu raise RuntimeError(F\"activation", "= self.deconv(features_list) locations = self.locations(features)[None] feat = self.activation(self.feat1(features)) class_logits =", "Arguments: features: (N, C, H, W) Return: locations: (2, H,", "= 
self.apply_ltrb(locations, pred_ltrb) return class_logits, pred_bboxes def apply_ltrb(self, locations, pred_ltrb):", "import Optional, List import torch from torch import nn, Tensor", "kernel_size=3, stride=1, padding=1) # Init parameters. prior_prob = cfg.MODEL.OneNet.PRIOR_PROB self.bias_value", "Reserved \"\"\" OneNet Transformer class. Copy-paste from torch.nn.Transformer with modifications:", "heads. num_classes = cfg.MODEL.OneNet.NUM_CLASSES d_model = cfg.MODEL.OneNet.DECONV_CHANNEL[-1] activation = cfg.MODEL.OneNet.ACTIVATION", "= d_model self.num_classes = num_classes self.activation = _get_activation_fn(activation) self.feat1 =", "backbone_shape) self.num_classes = num_classes self.d_model = d_model self.num_classes = num_classes", "p.shape[-1] == self.num_classes: nn.init.constant_(p, self.bias_value) def forward(self, features_list): features =", "# Copyright (c) Facebook, Inc. and its affiliates. All Rights", "F.relu(self.ltrb_pred(feat)) pred_bboxes = self.apply_ltrb(locations, pred_ltrb) return class_logits, pred_bboxes def apply_ltrb(self,", "4, kernel_size=3, stride=1, padding=1) # Init parameters. prior_prob = cfg.MODEL.OneNet.PRIOR_PROB", "self.deconv = CenternetDeconv(cfg, backbone_shape) self.num_classes = num_classes self.d_model = d_model", "initialize the bias for focal loss. if p.shape[-1] == self.num_classes:", "its affiliates. All Rights Reserved \"\"\" OneNet Transformer class. Copy-paste", "pred_boxes[:,1,:,:] = locations[:,1,:,:] - pred_ltrb[:,1,:,:] # y1 pred_boxes[:,2,:,:] = locations[:,0,:,:]", "# # Copyright (c) Facebook, Inc. and its affiliates. 
All", "* stride, step=stride, dtype=torch.float32, device=device ) shifts_y = torch.arange( 0,", "locations = locations.reshape(h, w, 2).permute(2, 0, 1) return locations def", "locations = self.locations(features)[None] feat = self.activation(self.feat1(features)) class_logits = self.cls_score(feat) pred_ltrb", "pred_ltrb[:,3,:,:] # y2 return pred_boxes @torch.no_grad() def locations(self, features, stride=4):", ") shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x) shift_x = shift_x.reshape(-1) shift_y", "= locations[:,1,:,:] - pred_ltrb[:,1,:,:] # y1 pred_boxes[:,2,:,:] = locations[:,0,:,:] +", "stride, step=stride, dtype=torch.float32, device=device ) shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x)", "W) \"\"\" h, w = features.size()[-2:] device = features.device shifts_x", "nn.Conv2d(self.d_model, self.d_model, kernel_size=3, stride=1, padding=1) self.cls_score = nn.Conv2d(d_model, num_classes, kernel_size=3,", "= nn.Conv2d(d_model, 4, kernel_size=3, stride=1, padding=1) # Init parameters. prior_prob", "and its affiliates. 
All Rights Reserved \"\"\" OneNet Transformer class.", "* extra LN at the end of encoder is removed", "shifts_y = torch.arange( 0, h * stride, step=stride, dtype=torch.float32, device=device", "pred_ltrb): \"\"\" :param locations: (1, 2, H, W) :param pred_ltrb:", "@torch.no_grad() def locations(self, features, stride=4): \"\"\" Arguments: features: (N, C,", "activation = cfg.MODEL.OneNet.ACTIVATION self.deconv = CenternetDeconv(cfg, backbone_shape) self.num_classes = num_classes", "features: (N, C, H, W) Return: locations: (2, H, W)", "= nn.Conv2d(d_model, num_classes, kernel_size=3, stride=1, padding=1) self.ltrb_pred = nn.Conv2d(d_model, 4,", "ROIPooler, cat from detectron2.structures import Boxes from .deconv import CenternetDeconv", "= -math.log((1 - prior_prob) / prior_prob) self._reset_parameters() def _reset_parameters(self): #", "locations, pred_ltrb): \"\"\" :param locations: (1, 2, H, W) :param", "0, w * stride, step=stride, dtype=torch.float32, device=device ) shifts_y =", "cfg.MODEL.OneNet.PRIOR_PROB self.bias_value = -math.log((1 - prior_prob) / prior_prob) self._reset_parameters() def", "passed in MHattention * extra LN at the end of", "locations: (1, 2, H, W) :param pred_ltrb: (N, 4, H,", "(N, C, H, W) Return: locations: (2, H, W) \"\"\"", "Transformer class. Copy-paste from torch.nn.Transformer with modifications: * positional encodings", "Facebook, Inc. and its affiliates. All Rights Reserved \"\"\" OneNet", "pred_ltrb[:,1,:,:] # y1 pred_boxes[:,2,:,:] = locations[:,0,:,:] + pred_ltrb[:,2,:,:] # x2", "if activation == \"relu\": return F.relu if activation == \"gelu\":", "init all parameters. for p in self.parameters(): if p.dim() >", "bias for focal loss. 
if p.shape[-1] == self.num_classes: nn.init.constant_(p, self.bias_value)", "W) Return: locations: (2, H, W) \"\"\" h, w =", "pred_ltrb) return class_logits, pred_bboxes def apply_ltrb(self, locations, pred_ltrb): \"\"\" :param", "by <NAME> # Contact: <EMAIL> # # Copyright (c) Facebook,", "# Modified by <NAME> # Contact: <EMAIL> # # Copyright", "= F.relu(self.ltrb_pred(feat)) pred_bboxes = self.apply_ltrb(locations, pred_ltrb) return class_logits, pred_bboxes def", "w = features.size()[-2:] device = features.device shifts_x = torch.arange( 0,", "self.cls_score = nn.Conv2d(d_model, num_classes, kernel_size=3, stride=1, padding=1) self.ltrb_pred = nn.Conv2d(d_model,", "C, H, W) Return: locations: (2, H, W) \"\"\" h,", "from detectron2.structures import Boxes from .deconv import CenternetDeconv class Head(nn.Module):", "= cfg.MODEL.OneNet.NUM_CLASSES d_model = cfg.MODEL.OneNet.DECONV_CHANNEL[-1] activation = cfg.MODEL.OneNet.ACTIVATION self.deconv =", ":param pred_ltrb: (N, 4, H, W) \"\"\" pred_boxes = torch.zeros_like(pred_ltrb)", "# Build heads. num_classes = cfg.MODEL.OneNet.NUM_CLASSES d_model = cfg.MODEL.OneNet.DECONV_CHANNEL[-1] activation", "device=device ) shifts_y = torch.arange( 0, h * stride, step=stride,", "locations def _get_activation_fn(activation): \"\"\"Return an activation function given a string\"\"\"", "prior_prob = cfg.MODEL.OneNet.PRIOR_PROB self.bias_value = -math.log((1 - prior_prob) / prior_prob)", "Boxes from .deconv import CenternetDeconv class Head(nn.Module): def __init__(self, cfg,", ") shifts_y = torch.arange( 0, h * stride, step=stride, dtype=torch.float32,", "w * stride, step=stride, dtype=torch.float32, device=device ) shifts_y = torch.arange(", "num_classes self.d_model = d_model self.num_classes = num_classes self.activation = _get_activation_fn(activation)", "class. 
Copy-paste from torch.nn.Transformer with modifications: * positional encodings are", "4, H, W) \"\"\" pred_boxes = torch.zeros_like(pred_ltrb) pred_boxes[:,0,:,:] = locations[:,0,:,:]", "Init parameters. prior_prob = cfg.MODEL.OneNet.PRIOR_PROB self.bias_value = -math.log((1 - prior_prob)", "Head(nn.Module): def __init__(self, cfg, backbone_shape=[2048, 1024, 512, 256]): super().__init__() #", "x1 pred_boxes[:,1,:,:] = locations[:,1,:,:] - pred_ltrb[:,1,:,:] # y1 pred_boxes[:,2,:,:] =", "shift_x.reshape(-1) shift_y = shift_y.reshape(-1) locations = torch.stack((shift_x, shift_y), dim=1) +", "Optional, List import torch from torch import nn, Tensor import", "h, w = features.size()[-2:] device = features.device shifts_x = torch.arange(", "= cfg.MODEL.OneNet.DECONV_CHANNEL[-1] activation = cfg.MODEL.OneNet.ACTIVATION self.deconv = CenternetDeconv(cfg, backbone_shape) self.num_classes", "pred_ltrb[:,0,:,:] # x1 pred_boxes[:,1,:,:] = locations[:,1,:,:] - pred_ltrb[:,1,:,:] # y1", "\"glu\": return F.glu raise RuntimeError(F\"activation should be relu/gelu, not {activation}.\")", "detectron2.modeling.poolers import ROIPooler, cat from detectron2.structures import Boxes from .deconv", "with modifications: * positional encodings are passed in MHattention *", "\"\"\" import copy import math from typing import Optional, List", "pred_ltrb: (N, 4, H, W) \"\"\" pred_boxes = torch.zeros_like(pred_ltrb) pred_boxes[:,0,:,:]", "H, W) \"\"\" pred_boxes = torch.zeros_like(pred_ltrb) pred_boxes[:,0,:,:] = locations[:,0,:,:] -", "== \"relu\": return F.relu if activation == \"gelu\": return F.gelu", "return F.relu if activation == \"gelu\": return F.gelu if activation", "step=stride, dtype=torch.float32, device=device ) shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x) shift_x", "- prior_prob) / prior_prob) self._reset_parameters() def _reset_parameters(self): # init all", "kernel_size=3, stride=1, padding=1) self.cls_score = nn.Conv2d(d_model, num_classes, kernel_size=3, stride=1, 
padding=1)", "from torch import nn, Tensor import torch.nn.functional as F from", "H, W) :param pred_ltrb: (N, 4, H, W) \"\"\" pred_boxes", "= num_classes self.activation = _get_activation_fn(activation) self.feat1 = nn.Conv2d(self.d_model, self.d_model, kernel_size=3,", "step=stride, dtype=torch.float32, device=device ) shifts_y = torch.arange( 0, h *", "pred_boxes @torch.no_grad() def locations(self, features, stride=4): \"\"\" Arguments: features: (N,", "W) :param pred_ltrb: (N, 4, H, W) \"\"\" pred_boxes =", "def forward(self, features_list): features = self.deconv(features_list) locations = self.locations(features)[None] feat", "cfg.MODEL.OneNet.ACTIVATION self.deconv = CenternetDeconv(cfg, backbone_shape) self.num_classes = num_classes self.d_model =", "return pred_boxes @torch.no_grad() def locations(self, features, stride=4): \"\"\" Arguments: features:", ".deconv import CenternetDeconv class Head(nn.Module): def __init__(self, cfg, backbone_shape=[2048, 1024,", "self.locations(features)[None] feat = self.activation(self.feat1(features)) class_logits = self.cls_score(feat) pred_ltrb = F.relu(self.ltrb_pred(feat))", "256]): super().__init__() # Build heads. num_classes = cfg.MODEL.OneNet.NUM_CLASSES d_model =", "<EMAIL> # # Copyright (c) Facebook, Inc. and its affiliates.", "-math.log((1 - prior_prob) / prior_prob) self._reset_parameters() def _reset_parameters(self): # init", "a string\"\"\" if activation == \"relu\": return F.relu if activation", "backbone_shape=[2048, 1024, 512, 256]): super().__init__() # Build heads. num_classes =", "cfg, backbone_shape=[2048, 1024, 512, 256]): super().__init__() # Build heads. 
num_classes", "are passed in MHattention * extra LN at the end", "features, stride=4): \"\"\" Arguments: features: (N, C, H, W) Return:", ":param locations: (1, 2, H, W) :param pred_ltrb: (N, 4,", "= locations[:,0,:,:] - pred_ltrb[:,0,:,:] # x1 pred_boxes[:,1,:,:] = locations[:,1,:,:] -", "shift_x = torch.meshgrid(shifts_y, shifts_x) shift_x = shift_x.reshape(-1) shift_y = shift_y.reshape(-1)", "self._reset_parameters() def _reset_parameters(self): # init all parameters. for p in", "all decoding layers \"\"\" import copy import math from typing", "activation == \"gelu\": return F.gelu if activation == \"glu\": return", "Contact: <EMAIL> # # Copyright (c) Facebook, Inc. and its", "pred_ltrb[:,2,:,:] # x2 pred_boxes[:,3,:,:] = locations[:,1,:,:] + pred_ltrb[:,3,:,:] # y2", "dtype=torch.float32, device=device ) shifts_y = torch.arange( 0, h * stride,", "stride=1, padding=1) self.cls_score = nn.Conv2d(d_model, num_classes, kernel_size=3, stride=1, padding=1) self.ltrb_pred", "\"gelu\": return F.gelu if activation == \"glu\": return F.glu raise", "self.d_model, kernel_size=3, stride=1, padding=1) self.cls_score = nn.Conv2d(d_model, num_classes, kernel_size=3, stride=1,", "\"\"\" h, w = features.size()[-2:] device = features.device shifts_x =", "locations = torch.stack((shift_x, shift_y), dim=1) + stride // 2 locations", "from typing import Optional, List import torch from torch import", "OneNet Transformer class. 
Copy-paste from torch.nn.Transformer with modifications: * positional", "* stride, step=stride, dtype=torch.float32, device=device ) shift_y, shift_x = torch.meshgrid(shifts_y,", "math from typing import Optional, List import torch from torch", "F.gelu if activation == \"glu\": return F.glu raise RuntimeError(F\"activation should", "activations from all decoding layers \"\"\" import copy import math", "= locations[:,1,:,:] + pred_ltrb[:,3,:,:] # y2 return pred_boxes @torch.no_grad() def", "stride // 2 locations = locations.reshape(h, w, 2).permute(2, 0, 1)", "Inc. and its affiliates. All Rights Reserved \"\"\" OneNet Transformer", "> 1: nn.init.xavier_uniform_(p) # initialize the bias for focal loss.", "num_classes self.activation = _get_activation_fn(activation) self.feat1 = nn.Conv2d(self.d_model, self.d_model, kernel_size=3, stride=1,", "pred_boxes = torch.zeros_like(pred_ltrb) pred_boxes[:,0,:,:] = locations[:,0,:,:] - pred_ltrb[:,0,:,:] # x1", "+ stride // 2 locations = locations.reshape(h, w, 2).permute(2, 0,", "d_model = cfg.MODEL.OneNet.DECONV_CHANNEL[-1] activation = cfg.MODEL.OneNet.ACTIVATION self.deconv = CenternetDeconv(cfg, backbone_shape)", "F.relu if activation == \"gelu\": return F.gelu if activation ==", "Tensor import torch.nn.functional as F from detectron2.modeling.poolers import ROIPooler, cat", "# init all parameters. for p in self.parameters(): if p.dim()", "self.deconv(features_list) locations = self.locations(features)[None] feat = self.activation(self.feat1(features)) class_logits = self.cls_score(feat)", "Rights Reserved \"\"\" OneNet Transformer class. 
Copy-paste from torch.nn.Transformer with", "__init__(self, cfg, backbone_shape=[2048, 1024, 512, 256]): super().__init__() # Build heads.", "device = features.device shifts_x = torch.arange( 0, w * stride,", "\"\"\"Return an activation function given a string\"\"\" if activation ==", "# y1 pred_boxes[:,2,:,:] = locations[:,0,:,:] + pred_ltrb[:,2,:,:] # x2 pred_boxes[:,3,:,:]", "import CenternetDeconv class Head(nn.Module): def __init__(self, cfg, backbone_shape=[2048, 1024, 512,", "self.cls_score(feat) pred_ltrb = F.relu(self.ltrb_pred(feat)) pred_bboxes = self.apply_ltrb(locations, pred_ltrb) return class_logits,", "return class_logits, pred_bboxes def apply_ltrb(self, locations, pred_ltrb): \"\"\" :param locations:", "nn.init.constant_(p, self.bias_value) def forward(self, features_list): features = self.deconv(features_list) locations =", "# x2 pred_boxes[:,3,:,:] = locations[:,1,:,:] + pred_ltrb[:,3,:,:] # y2 return", "_reset_parameters(self): # init all parameters. for p in self.parameters(): if", "All Rights Reserved \"\"\" OneNet Transformer class. Copy-paste from torch.nn.Transformer", "locations[:,0,:,:] - pred_ltrb[:,0,:,:] # x1 pred_boxes[:,1,:,:] = locations[:,1,:,:] - pred_ltrb[:,1,:,:]", "for focal loss. 
if p.shape[-1] == self.num_classes: nn.init.constant_(p, self.bias_value) def", "extra LN at the end of encoder is removed *", "from .deconv import CenternetDeconv class Head(nn.Module): def __init__(self, cfg, backbone_shape=[2048,", "positional encodings are passed in MHattention * extra LN at", "# # Modified by <NAME> # Contact: <EMAIL> # #", "= torch.zeros_like(pred_ltrb) pred_boxes[:,0,:,:] = locations[:,0,:,:] - pred_ltrb[:,0,:,:] # x1 pred_boxes[:,1,:,:]", "p.dim() > 1: nn.init.xavier_uniform_(p) # initialize the bias for focal", "in self.parameters(): if p.dim() > 1: nn.init.xavier_uniform_(p) # initialize the", "shifts_x = torch.arange( 0, w * stride, step=stride, dtype=torch.float32, device=device", "def __init__(self, cfg, backbone_shape=[2048, 1024, 512, 256]): super().__init__() # Build", "features.size()[-2:] device = features.device shifts_x = torch.arange( 0, w *", "(N, 4, H, W) \"\"\" pred_boxes = torch.zeros_like(pred_ltrb) pred_boxes[:,0,:,:] =", "activation function given a string\"\"\" if activation == \"relu\": return", "= self.activation(self.feat1(features)) class_logits = self.cls_score(feat) pred_ltrb = F.relu(self.ltrb_pred(feat)) pred_bboxes =", "locations(self, features, stride=4): \"\"\" Arguments: features: (N, C, H, W)", "2 locations = locations.reshape(h, w, 2).permute(2, 0, 1) return locations", "\"\"\" :param locations: (1, 2, H, W) :param pred_ltrb: (N,", "encodings are passed in MHattention * extra LN at the", "d_model self.num_classes = num_classes self.activation = _get_activation_fn(activation) self.feat1 = nn.Conv2d(self.d_model,", "= self.locations(features)[None] feat = self.activation(self.feat1(features)) class_logits = self.cls_score(feat) pred_ltrb =", "detectron2.structures import Boxes from .deconv import CenternetDeconv class Head(nn.Module): def", "== self.num_classes: nn.init.constant_(p, self.bias_value) def forward(self, features_list): features = self.deconv(features_list)", "= shift_x.reshape(-1) shift_y = 
shift_y.reshape(-1) locations = torch.stack((shift_x, shift_y), dim=1)", "= nn.Conv2d(self.d_model, self.d_model, kernel_size=3, stride=1, padding=1) self.cls_score = nn.Conv2d(d_model, num_classes,", "locations[:,0,:,:] + pred_ltrb[:,2,:,:] # x2 pred_boxes[:,3,:,:] = locations[:,1,:,:] + pred_ltrb[:,3,:,:]", "class_logits = self.cls_score(feat) pred_ltrb = F.relu(self.ltrb_pred(feat)) pred_bboxes = self.apply_ltrb(locations, pred_ltrb)", "padding=1) self.cls_score = nn.Conv2d(d_model, num_classes, kernel_size=3, stride=1, padding=1) self.ltrb_pred =", "(2, H, W) \"\"\" h, w = features.size()[-2:] device =", "= locations.reshape(h, w, 2).permute(2, 0, 1) return locations def _get_activation_fn(activation):", "= torch.arange( 0, h * stride, step=stride, dtype=torch.float32, device=device )", "torch.stack((shift_x, shift_y), dim=1) + stride // 2 locations = locations.reshape(h,", "stack of activations from all decoding layers \"\"\" import copy", "typing import Optional, List import torch from torch import nn,", "x2 pred_boxes[:,3,:,:] = locations[:,1,:,:] + pred_ltrb[:,3,:,:] # y2 return pred_boxes", "num_classes = cfg.MODEL.OneNet.NUM_CLASSES d_model = cfg.MODEL.OneNet.DECONV_CHANNEL[-1] activation = cfg.MODEL.OneNet.ACTIVATION self.deconv", "Modified by <NAME> # Contact: <EMAIL> # # Copyright (c)", "torch from torch import nn, Tensor import torch.nn.functional as F", "# x1 pred_boxes[:,1,:,:] = locations[:,1,:,:] - pred_ltrb[:,1,:,:] # y1 pred_boxes[:,2,:,:]", "\"relu\": return F.relu if activation == \"gelu\": return F.gelu if", "if activation == \"gelu\": return F.gelu if activation == \"glu\":", "returns a stack of activations from all decoding layers \"\"\"", "end of encoder is removed * decoder returns a stack", "- pred_ltrb[:,1,:,:] # y1 pred_boxes[:,2,:,:] = locations[:,0,:,:] + pred_ltrb[:,2,:,:] #", "affiliates. All Rights Reserved \"\"\" OneNet Transformer class. 
Copy-paste from", "stride, step=stride, dtype=torch.float32, device=device ) shifts_y = torch.arange( 0, h", "self.ltrb_pred = nn.Conv2d(d_model, 4, kernel_size=3, stride=1, padding=1) # Init parameters.", "of activations from all decoding layers \"\"\" import copy import", "\"\"\" Arguments: features: (N, C, H, W) Return: locations: (2,", "= torch.meshgrid(shifts_y, shifts_x) shift_x = shift_x.reshape(-1) shift_y = shift_y.reshape(-1) locations" ]
[ "* np.power(sig[1], 2.)) -np.power(X[2,:, :, :] - mu[2], 2.) /", "eps=0.0001, momentum=0, affine=True) if bn else None if active_unit ==", "the keys based on memory id. :param pd: parameter dictionary", "in 1D, [5,10] in 2D, [5,10,10] in 3D) :param nrOfI:", "phi[0, ...] + Ab[3] * phi[1, ...] + Ab[5] #", "compute_warped_image_multiNC(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True): \"\"\"Warps image. :param I0:", "the vector momentum \"\"\" nrOfI = sz[0] # number of", ":, :, :] = fdt.dZc(I)*lam else: raise ValueError('Can only convert", "Z :param spline_order: :param zero_boundary: :param identity_map: :return: \"\"\" if", "-= 1 print( '\\n\\nWARNING: forcing last dimension to be even:", "= compute_warped_image_multiNC(I0.view(torch.Size([1, 1] + list(I0.size()))), phi.view(torch.Size([1] + list(phi.size()))), spacing, spline_order,", "to be of BxCxXxYxZ format) :param spacing: array describing the", "== 2: idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype) idnp[0, :,", "super(AdpSmoother, self).__init__() self.dim = dim self.net_sched = 'm_only' self.s =", "phiR[0, ...] = Ab[0] * phi[0, ...] + Ab[3] *", "= np.zeros([nrOfI,3,sz[2],sz[3],sz[4]],dtype=dtype) else: raise ValueError('Only dimensions 1-3 are currently supported", "2D, [5,10,10] in 3D) :return: returns vector field of size", "Gaussian. :param X: map with coordinates at which to evaluate", "model parameters and converts it into a list of parameters", "+ list(phi.size()))), spacing, spline_order, zero_boundary, use_01_input) return Iw.view(I0.size()) def compute_warped_image_multiNC(I0,", "nrOfI = int(sz[0]) if dim == 1: id = np.zeros([nrOfI,1,sz[2]],dtype=dtype)", "id def centered_identity_map(sz, spacing, dtype='float32'): \"\"\" Returns a centered identity", "== 2: # id = np.mgrid[0:sz[0], 0:sz[1]] # elif dim", "str # from builtins import range import torch from torch.nn.parameter", ":] - mu[2], 2.) 
/ (2 * np.power(sig[2], 2.))) g", "s_m_params['smoother']['type'] = 'gaussian' s_m_params['smoother']['gaussian_std'] = gaussian_std s_m = sf.SmootherFactory(sz, spacing).create_smoother(s_m_params)", "return __time_warped_function def interoplate_boundary_right(tensor): dim = len(tensor.shape)-2 if dim==1: tensor[:,:,-1]=", "if Ab.shape[1] == 2: dim = 1 elif Ab.shape[1] ==", "torch.cat((moving, target), dim=3) elif sched == 'list_concat': input = torch.cat((moving.unsqueeze(0),target.unsqueeze(0)),dim=0)", "1: g = np.exp(-np.power(X[0, :] - mu[0], 2.)/(2*np.power(sig[0], 2.))) g", "create_local_filter_weights_parameter_multiN(sz,gaussian_std_weights, nrOfI=1,sched='w_K_w',get_preweight_from_network=False): \"\"\" Create vector field torch Parameter of given", "0:sz[2]] # else: # raise ValueError('Only dimensions 1-3 are currently", "= nn.Conv2d(self.dim + 2, self.dim, kernel_size, 1, padding=padding_size, bias=False) else:", ":return: tuple of (parameter_list, name_dictionary) \"\"\" par_to_name_dict = dict() pl", "affine parameters \"\"\" dim = 0 if Ab.shape[1]==2: dim =", "to 9 are supported') if spline_order == 0: stn =", "parameter vector \"\"\" return Parameter(MyTensor(nr_of_elements).normal_(0., 1e-7)) def create_ND_vector_field_parameter_multiN(sz, nrOfI=1,get_field_from_external_network=False): \"\"\"Create", "mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range ]=1 sm = get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask = sm.smooth(mask) return mask.detach()", "= self.fc(x) if self.active_unit is not None: x = self.active_unit(x)", "\"\"\" pl = [] for key in pd: pl.append(pd[key]) return", "ID = compute_warped_image_multiNC(I, idDes, newspacing, spline_order, zero_boundary) return ID if", "idnp[1, :, :, :] = id[1] * spacing_ratio[1] # idnp[2,", "each list element has a dictionary with keys 'name' and", "lam: scalar momentum, BxCxXxYxZ :param I: image, BxCxXxYxZ :param sz:", "# # return idnp # # def 
tranfrom_var_list_into_min_normalized_space(var_list,spacing,do_transform=True): # if", "phiR = MyTensor(sz).zero_().type_as(phi) for nrI in range(nr_of_images): phiR[nrI, ...] =", "spacing_ratio[2] # else: # raise ValueError('Only dimensions 1-3 are currently", "= Parameter(torch.cat([torch.ones(inputs['s'].size())]*dim, 1), requires_grad = True) self.get_net_sched() #self.net.register_backward_hook(bh) def get_net_sched(self,", "spatial dimensions, i.e., XxYxZ :param spacing: list with spacing information", "= STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=True, use_01_input=use_01_input) else: stn = SplineInterpolation_ND_BCXYZ(spacing, spline_order)", "return ID if not is_numpy else ID.numpy(), newspacing def get_res_size_from_size(sz,", "if dim==3: tensor[:, :,:, -1,:, :] = tensor[:, :, -2,", "init.normal(m.weight.data) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0)", "torch.Tensor): I = torch.Tensor(I) is_numpy = True sz = np.array(list(I.size()))", "inputs['s'].detach() self.t = inputs['t'].detach() self.mask = Parameter(torch.cat([torch.ones(inputs['s'].size())]*dim, 1), requires_grad =", "NotImplementedError('initialization method [%s] is not implemented' % init_type) def organize_data(moving,", "pars (batch size x param. vector) :return: Updated affine parameters", "is simply [a1;a2;a3;b], i.e., all columns stacked on top of", "maps \"\"\" sz = phi.size() dim = get_dim_of_affine_transform(Ab[0,:]) nr_of_images =", "stacked on top of each other. :param Ab: parameter vector", "\"\"\" Returns a centered identity map (with 0 in the", "= np.array(id.astype(dtype)) if dim == 1: id = id.reshape(1, sz[0])", "# spacing_ratio_t = AdaptVal(torch.Tensor(spacing_ratio)) # sp_sz = [1]+[dim] +[1]*dim #", "= [torch.sqrt(std_w) for std_w in gaussian_std_weights] for g in range(nr_of_mg_weights):", "dim =0 if Ab.shape[1] == 2: dim = 1 elif", "specified number of elements. 
:param nr_of_elements: number of vector elements", "0 in the middle) if the sz is odd #", "dimxXxYxZ # \"\"\" # dim = len(sz) # if dim", "of A of appropriate dimension \"\"\" current_dim = len(A.shape) if", "forcing last dimension to be even: fix properly in the", "vector momentum \"\"\" nrOfI = sz[0] # number of images", "ext_s = os.path.splitext(abs_s)[1] abs_t = os.path.abspath(tf) root_t,ext_t = os.path.splitext(abs_t) abs_t_with_right_ext", "2.) / (2 * np.power(sig[1], 2.)) -np.power(X[2,:, :, :] -", "g = compute_normalized_gaussian(centered_id, mus, stds) tensors[n,c] = torch.from_numpy(g) def weights_init_uniform(m):", "if dim ==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1 elif dim==3: mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range ]=1 sm =", "def create_vector_parameter(nr_of_elements): \"\"\"Creates a vector parameters with a specified number", "indicating the standard deviations for the different dimensions :return: Normalized", "dtype=dtype) idnp[0,:, :, :] = id[0] idnp[1,:, :, :] =", "# for d in range(dim): # id[d] *= spacing[d] #", "the cpu :param v: torch array :return: numpy array \"\"\"", "def _compute_warped_image_multiNC_1d(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True): if spline_order not", "spline_order == 0: stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=False, use_01_input=use_01_input) elif", "a map. :param Ab: affine transform parameter column vector :param", "array csz = np.array([nrOfI,nrOfC]+list(csz)) return Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) def centered_identity_map_multiN(sz, spacing, dtype='float32'):", "sz[0], sz[1], sz[2]], dtype=dtype) idnp[0, :, :, :] = id[0]", "\"\"\"Update affine parameters. Formally: C(Ax+b)+d = CAx+Cb+d :param Ab: B", "if var is not None else None for var in", "v ): \"\"\" Takes a torch array and returns it", "y=Ax+b stored in a column vector. 
For A =[a1,a2,a3], the", "classname = m.__class__.__name__ print(classname) if classname.find('Conv') != -1: init.orthogonal(m.weight.data, gain=1)", "sz[0], sz[1]], dtype=dtype) idnp[0,:, :] = id[0] idnp[1,:, :] =", "+= [nn.Sigmoid()] self.net = nn.Sequential(*net) def prepare_data(self, m, new_s): input=None", "type(v.data) == torch.cuda.FloatTensor or v.data.dtype==torch.float32: return torch.clamp(v, min=(np.asscalar(np.finfo('float32').min))/reduction_factor, max=(np.asscalar(np.finfo('float32').max))/reduction_factor) elif", "classname.find('Conv') != -1: init.uniform(m.weight.data, 0.038, 0.042) elif classname.find('Linear') != -1:", ":return: returns vector field of size nrOfIxdimxXxYxZ \"\"\" dim =", ". import finite_differences as fd import torch.nn as nn import", "id = np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]] else: raise ValueError('Only dimensions 1-3", "<1) :return: low res size \"\"\" if (factor is None):", "init.xavier_normal(m.weight.data, gain=1) elif classname.find('Linear') != -1: init.xavier_normal(m.weight.data, gain=1) elif classname.find('BatchNorm2d')", "is not implemented' % init_type) def organize_data(moving, target, sched='depth_concat'): if", "add assert assert abs(torch.sum(warped_label_map.data -warped_label_map.data.round()))< 0.1, \"nn interpolation is not", "= nn.BatchNorm2d(out_channels, eps=0.0001, momentum=0, affine=True) if bn else None if", "'elu': self.active_unit = nn.ELU(inplace=True) else: self.active_unit = None def forward(self,", "Returns the corresponding low-res size from a (high-res) sz :param", "np.exp(-np.power(X[0, :] - mu[0], 2.)/(2*np.power(sig[0], 2.))) g = g/g.sum() return", "the right format model_pars = ind_pars else: # if ind_pars", "for model parameters and converts it into a list of", "== 'nn': warped_label_map = compute_warped_image_multiNC(label_map, phi, spacing,spline_order=0,zero_boundary=True) # check if", "on memory id. 
:param pd: parameter dictionary :return: tuple of", "1: # idnp = np.zeros([1, sz[0]], dtype=dtype) # idnp[0, :]", "X: map with coordinates at which to evaluate :param mu:", "'depth_concat': input = torch.cat([moving, target], dim=1) elif sched == 'width_concat':", "= torch.Tensor(I) is_numpy = True sz = np.array(list(I.size())) # check", "self.active_unit = None def forward(self, x): x = self.fc(x) if", "self).__init__() self.dim = dim self.net_sched = 'm_only' self.s = inputs['s'].detach()", "image. :param I0: image to warp, image size BxCxXxYxZ :param", "has a dictionary with keys 'name' and 'model_params' for par", "= get_dim_of_affine_transform(Ab) if dim==1: Ab.zero_() Ab[0]=1. elif dim==2: Ab.zero_() Ab[0]=1.", "tensor[:, :, :,-3] if dim==3: tensor[:, :,:, -1,:, :] =", "resampled, new_spacing = resample_image(I, spacing, desiredSize, spline_order=spline_order, zero_boundary=zero_boundary, identity_map=identity_map) return", "the vector momentum from the scalar momentum: :math:`m=\\\\lambda\\\\nabla I`. 
:param", "of transform (1,2,or 3) \"\"\" nr = len(Ab) if nr==2:", "map for the warping, size dimxXxYxZ :param spacing: image spacing", "/ (2 * np.power(sig[1], 2.)) -np.power(X[2,:, :, :] - mu[2],", "noramlized_spacing_to_smallest(spacing): min_sp = np.min(spacing) spacing[spacing>min_sp]=min_sp return spacing def time_warped_function(f): def", "sizes (e.g., [5] in 1D, [5,10] in 2D, [5,10,10] in", "= m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: init.kaiming_normal(m.weight.data, a=0,", "torch.cuda.HalfTensor: return torch.clamp(v, min=(np.asscalar(np.finfo('float16').min))/reduction_factor, max=(np.asscalar(np.finfo('float16').max))/reduction_factor) else: raise ValueError('Unknown data type:", "= len(tensor.shape)-2 if dim==1: tensor[:,:,-1]= tensor[:,:-2]+ tensor[:,:-2]-tensor[:,:-3] if dim==2: tensor[:,", "g/g.sum() return g elif dim == 2: g = np.exp(-np.power(X[0,:,:]-mu[0],2.)/(2*np.power(sig[0],2.))", "!= -1: init.orthogonal(m.weight.data, gain=1) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0,", "if isinstance(tensors, Variable): space_normal(tensors.data, std=std) return tensors for n in", "supports dimensions 1, 2, and 3.') return phiR def apply_affine_transform_to_map_multiNC(Ab,phi):", "Ab_inv[n, :, :dim] = tm_inv Ab_inv[n, :, dim] = -", "Ab: affine transform parameter column vector :param phi: map; format", "elif dim==3: idnp = np.zeros([3,sz[0], sz[1], sz[2]], dtype=dtype) idnp[0,:, :,", "in a dim+1 array if dim==1: idnp = np.zeros([1, sz[0]],", "pl.append(pd[key]) par_to_name_dict[pd[key]] = key return pl, par_to_name_dict def remove_infs_from_variable(v): #", "= np.mgrid[0:sz[0]] # elif dim == 2: # id =", "supports dimensions 1, 2, and 3.') Ab = Ab.view(Ab.shape[0], dim+1,", "var_list # return new_var_list # def recover_var_list_from_min_normalized_space(var_list,spacing,do_transform=True): # if do_transform:", "* spacing_ratio[1] # idnp[2, :, :, :] = id[2] *", "spacing. 
:param spacing: image spacing :param sz: size of image", "parameterization \"\"\" # todo: check that this is the correct", "dim+1, dim).transpose(1, 2) updated_param = torch.zeros_like(Ab) for n in range(Ab.shape[0]):", "id = np.array(id.astype(dtype)) if dim == 1: id = id.reshape(1,", "# omt_weight_penalty = param['forward_model']['smoother']['omt_weight_penalty'] # min_std = torch.min(stds) # max_std", "= I0.dim()-2 if dim == 1: return _compute_warped_image_multiNC_1d(I0, phi, spacing,", "as factor was ' + str(factor)) return np.array(sz) else: low_res_sz", "d def get_parameter_list_from_parameter_dict(pd): \"\"\"Takes a dictionary which contains key value", "field of size nrOfIxdimxXxYxZ \"\"\" dim = len(sz) csz =", "of affine transforms') phiR = MyTensor(sz).zero_().type_as(phi) for nrI in range(nr_of_images):", "import Parameter from torch.autograd import Variable from .libraries.modules.stn_nd import STN_ND_BCXYZ", "mean[x]}{ \\sqrt{Var[x] + \\epsilon}} * gamma + beta #When affine=False", "dict() if type(ind_pars) == type(dict()): # should already be in", "init from . import module_parameters as pars from .spline_interpolation import", "maps; format batchxnrCxXxYxZ (nrC corresponds to dimension) :return: returns transformed", "2.) 
/ (2 * np.power(sig[1], 2.))) g = g/g.sum() return", "Create vector field torch Parameter of given size :param sz:", ":return: Normalized Gaussian evaluated at coordinates in X Example:: >>>", "def weights_init_rd_normal(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv') !=", ":param Ab: parameter vector :return: dimensionality of transform (1,2,or 3)", "1, 2, 3, 4, 5, 6, 7, 8, 9]: raise", "sz[1]], dtype=dtype) # idnp[0, :, :] = id[0] * spacing_ratio[0]", "import smoother_factory as sf from .data_wrapper import USE_CUDA import numpy", "dim, net_sched=None): # settings should include [using_bias, using bn, using", ":, :, -2] + tensor[:, :, :, -2] - tensor[:,", "but not remove them') if current_dim == dim: return A", ".libraries.functions.nn_interpolation import get_nn_interpolation except ImportError: print('WARNING: nn_interpolation could not be", "type(v.data) == torch.cuda.DoubleTensor: return torch.clamp(v, min=(np.asscalar(np.finfo('float64').min))/reduction_factor, max=(np.asscalar(np.finfo('float64').max))/reduction_factor) elif v.data.dtype ==", "-1) return inv_affine_param def update_affine_param(Ab, Cd): \"\"\"Update affine parameters. Formally:", "for the identity map') # now get it into range", "the same nrOfI = sz[0] nrOfC = sz[1] desiredSizeNC =", "1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_normal(m): classname = m.__class__.__name__ #", "1D, [5,10] in 2D, [5,10,10] in 3D) :param nrOfI: number", "the low-res parametrization from image spacing. :param spacing: image spacing", "image size BxCxXxYxZ :param phi: map for the warping, size", "= np.array(id.astype(dtype)) # if dim == 1: # id =", "Y Z :param spacing: spx spy spz :param desiredSize: B", "returns transformed map \"\"\" sz = phi.size() dim = len(sz)", "of them. :param d1: dictionary 1 :param d2: dictionary 2", "sig): \"\"\"Computes a normalized Gaussian. 
:param X: map with coordinates", "3: idnp = np.zeros([3, sz[0], sz[1], sz[2]], dtype=dtype) idnp[0, :,", "# new_var_list = var_list # return new_var_list # def recover_var_list_from_min_normalized_space(var_list,spacing,do_transform=True):", "not in [1, 2, 3]: raise ValueError('Only supports dimensions 1,", "parameter vector is simply [a1;a2;a3;b], i.e., all columns stacked on", "dim] = - torch.matmul(tm_inv, Ab[n,:,dim]) inv_affine_param = Ab_inv.transpose(1, 2).contiguous().view(Ab.shape[0], -1)", "Updated affine parameters \"\"\" dim = 0 if Ab.shape[1]==2: dim", ":param X: map with coordinates at which to evaluate :param", "dim == 2: id = np.zeros([nrOfI,2,sz[2],sz[3]],dtype=dtype) elif dim == 3:", "scalar momentum: :math:`m=\\\\lambda\\\\nabla I`. :param lam: scalar momentum, BxCxXxYxZ :param", "...], nrOfI, sz[2::], spacing) return m def compute_vector_momentum_from_scalar_momentum_multiN(lam, I, nrOfI,", "and now store it in a dim+1 array if dim==1:", "the downsampled image, the new spacing after downsampling \"\"\" desiredSize", "nrOfC = sz[1] desiredSizeNC = np.array([nrOfI, nrOfC] + list(desiredSize)) newspacing", "+ beta #When affine=False the output of BatchNorm is equivalent", "size x param. vector) :return: Inverse of affine parameters \"\"\"", "if using_sigmoid: net += [nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched", "torch Parameter of given size. 
:param sz: just the spatial", "spline_order, zero_boundary) return ID if not is_numpy else ID.numpy(), newspacing", "an image to a given desired size :param I: Input", "only orders 0 to 9 are supported') if spline_order ==", "= np.array(sz) if not isinstance(factor, list): lowResSize[2::] = (np.ceil((np.array(sz[2:]) *", "def get_res_spacing_from_spacing(spacing, sz, lowResSize): \"\"\" Computes spacing for the low-res", "AdaptVal(torch.Tensor(spacing_ratio)) # sp_sz = [1]+[dim] +[1]*dim # spacing_ratio_t = spacing_ratio_t.view(*sp_sz)", "m = m + compute_vector_momentum_from_scalar_momentum_multiN(lam[:, c, ...], I[:, c, ...],", "dimension) :return: returns transformed map \"\"\" sz = phi.size() dim", "get_res_size_from_size(sz, factor): \"\"\" Returns the corresponding low-res size from a", "target], dim=1) elif sched == 'width_concat': input = torch.cat((moving, target),", "for the identity map') for n in range(nrOfI): id[n, ...]", "sched == 'depth_concat': input = torch.cat([moving, target], dim=1) elif sched", "v: torch array :return: numpy array \"\"\" return (v.detach()).cpu().numpy() def", ":return: returns the identity map \"\"\" dim = len(sz)-2 nrOfI", "MyTensor(*(csz.tolist())).normal_(0.,1e-7) tmp.requires_grad = True else: tmp = Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) return tmp", "dictionary :return: list of parameters \"\"\" pl = [] for", "* phi[1, ...] + Ab[5] # a_12x+a_22y+b2 elif dim ==", "elif dim==3: id = np.mgrid[0:sz[0],0:sz[1],0:sz[2]] else: raise ValueError('Only dimensions 1-3", "== 2: m[:, 0, :, :] = fdt.dXc(I)*lam m[:, 1,", "g else: raise ValueError('Can only compute Gaussians in dimensions 1-3')", "# and now store it in a dim+1 array if", "return idnp # # def centered_min_normalized_identity_map(sz, spacing, dtype='float32'): # \"\"\"", "map. 
:param sz: just the spatial dimensions, i.e., XxYxZ :param", "number of dimensions corresponding to an affine transformation of the", "return I1_warped def _compute_warped_image_multiNC_2d(I0, phi, spacing, spline_order,zero_boundary=False,use_01_input=True): if spline_order not", "\"\"\" Resample an image to a given desired size :param", "tuple: the downsampled image, the new spacing after downsampling \"\"\"", "v.size() reduction_factor = np.prod(np.array(sz)) condition = True if type(v.data) ==", "dim = len(img_sz) mask_sz = [1,1]+ list(img_sz) mask = AdaptVal(torch.zeros(*mask_sz))", "get_warped_label_map(label_map, phi, spacing, sched='nn'): if sched == 'nn': warped_label_map =", "= sm.smooth(mask) return mask.detach() def momentum_boundary_weight_mask(img_sz,spacing,mask_range=5,smoother_std =0.05,pow=2): \"\"\"generate a smooth", "middle) if the sz is odd Otherwise shifts everything by", "sz: just the spatial dimensions, i.e., XxYxZ # :param spacing:", "the mean :param sig: array indicating the standard deviations for", "this size should not occur in practice anyway sz =", "now store it in a dim+1 array and rescale by", "entry for 1D, 2 for 2D, and 3 for 3D)", "0.042) elif classname.find('Linear') != -1: init.uniform(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm2d')", "= nn.Sequential(*net) elif self.net_sched == 'm_d_s': if debugging: self.net =", "omt_weight_penalty = param['forward_model']['smoother']['omt_weight_penalty'] # min_std = torch.min(stds) # max_std =", "/ (np.array(lowResSize[2::]) - 1) ########################################## Adaptive Net ###################################################3 def space_normal(tensors,", "zero_boundary, use_01_input) return Iw.view(I0.size()) def compute_warped_image_multiNC(I0, phi, spacing, spline_order, zero_boundary=False,", "for nrI in range(nr_of_images): phiR[nrI, ...] 
= apply_affine_transform_to_map(Ab[nrI, :], phi[nrI,", "sz: size of image :param lowResSize: size of low re", "!= -1: init.xavier_normal(m.weight.data, gain=1) elif classname.find('Linear') != -1: init.xavier_normal(m.weight.data, gain=1)", "'orthogonal': net.apply(weights_init_orthogonal) else: raise NotImplementedError('initialization method [%s] is not implemented'", "B x pars (batch size x param. vector) :return: Inverse", "'m_d_s_f_t': input = organize_data(m, new_s, sched='depth_concat') input = organize_data(input, self.t,", "1D, 2 for 2D, and 3 for 3D) :return: returns", "ImportError: print('WARNING: nn_interpolation could not be imported (only supported in", "sz. :param sz: size (high-res) :param factor: low-res factor (needs", "(v.detach()).cpu().numpy() def cxyz_to_xyzc( v ): \"\"\" Takes a torch array", "elif Ab.shape[1] == 6: dim = 2 elif Ab.shape[1] ==", "else None for var in var_list] # else: # new_var_list", "organize_data(m, self.s, sched='depth_concat') input = organize_data(input, self.t, sched='depth_concat') elif self.net_sched", "resample_image(I, spacing, desiredSize, spline_order=spline_order, zero_boundary=zero_boundary, identity_map=identity_map) return resampled def resample_image(I,", "key return pl, par_to_name_dict def remove_infs_from_variable(v): # 32 - bit", "corresponds to dimension) :return: returns transformed map \"\"\" sz =", "a centered identity map (shifted so it is centered around", "simply [a1;a2;a3;b], i.e., all columns stacked on top of each", "use_01_input=use_01_input) elif spline_order == 1: stn = STN_ND_BCXYZ(spacing,zero_boundary, use_bilinear=True, use_01_input=use_01_input)", "the new spacing after downsampling \"\"\" desiredSize = desiredSize[2:] is_numpy", "elu] # inputs should be a dictionary could contain ['s'],['t']", ":,-1] = tensor[:, :, :,-2] + tensor[:, :, :,-2] -", "debugging: self.net = nn.Conv2d(2, 2, kernel_size, 1, padding=padding_size, bias=False,groups=2) else:", "(i.e., list and each list element 
has a dictionary with", "g = g/g.sum() return g elif dim == 2: g", "scalar momentum, BxCxXxYxZ :param I: image, BxCxXxYxZ :param sz: size", "= MyTensor(sz).zero_().type_as(phi) for nrI in range(nr_of_images): phiR[nrI, ...] = apply_affine_transform_to_map(Ab[nrI,", "dim = len(img_sz) mask_sz = [1,1]+ list(img_sz) mask = AdaptVal(torch.ones(*mask_sz))*mask_value", "* spacing_ratio[2] # else: # raise ValueError('Only dimensions 1-3 are", "affine transform to maps (for arbitrary batch size). :param Ab:", "a dictionary which keeps track of the keys based on", "2, and 3.') Ab = Ab.view(Ab.shape[0], dim+1, dim).transpose(1, 2) Cd", "dim = len(v.shape)-2 if dim ==2: v = v.permute(0,2,3,1) if", "!= -1: space_normal(m.weight.data) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02)", "that they come from the optimizer # (i.e., list and", "super(FcRel, self).__init__() self.fc = nn.Linear(in_features, out_features) if active_unit == 'relu':", "input = organize_data(m, new_s, sched='depth_concat') elif self.net_sched == 'm_f_s_t': input", "c in range(nrOfC): # loop over all the channels and", "tensor[:, :, :,-1] = tensor[:, :, :,-2] + tensor[:, :,", "1.)) ########################################### if identity_map is not None: idDes = identity_map", "in_features, out_features, active_unit='relu'): super(FcRel, self).__init__() self.fc = nn.Linear(in_features, out_features) if", "nn.Linear(in_features, out_features) if active_unit == 'relu': self.active_unit = nn.ReLU(inplace=True) elif", "elif dim == 2: id = np.zeros([nrOfI,2,sz[2],sz[3]],dtype=dtype) elif dim ==", "array csz = np.array([nrOfI, dim]+list(csz)) if get_field_from_external_network: tmp = MyTensor(*(csz.tolist())).normal_(0.,1e-7)", "the middle) if the sz is odd Otherwise shifts everything", "5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20,self.dim, 5, active_unit=active_unit, same_padding=True, bn=using_bn)] if", "conv + bn (optional) + relu def 
__init__(self, in_channels, out_channels,", "+= [nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched == 'm_f_s_t': if", "combine_dict(d1,d2): \"\"\"Creates a dictionary which has entries from both of", "# spacing_ratio_t = spacing_ratio_t.view(*sp_sz) # new_var_list = [var*spacing_ratio_t if var", "do the symlink os.symlink(abs_s,abs_t_with_right_ext) def combine_dict(d1,d2): \"\"\"Creates a dictionary which", "compute low_res_size as factor was ' + str(factor)) return np.array(sz)", "= np.exp(-np.power(X[0, :] - mu[0], 2.)/(2*np.power(sig[0], 2.))) g = g/g.sum()", "of arbitrary batch size). :param Ab: Parameter vectors B x", "factor: low-res factor (needs to be <1) :return: low res", "# if dim == 1: # id = np.mgrid[0:sz[0]] #", "-1: init.orthogonal(m.weight.data, gain=1) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02)", "'gaussian' s_m_params['smoother']['gaussian_std'] = gaussian_std s_m = sf.SmootherFactory(sz, spacing).create_smoother(s_m_params) return s_m", "= Ab[0] * phi[0, ...] + Ab[3] * phi[1, ...]", "self.s = inputs['s'].detach() self.t = inputs['t'].detach() self.mask = Parameter(torch.cat([torch.ones(inputs['s'].size())]*dim, 1),", "from builtins import str # from builtins import range import", "\"\"\" dim = 0 if Ab.shape[1]==2: dim = 1 elif", "image of size XxYxZ \"\"\" # implements this by creating", "I`. :param lam: scalar momentum, BxCxXxYxZ :param I: image, BxCxXxYxZ", "elif spline_order == 1: stn = STN_ND_BCXYZ(spacing,zero_boundary, use_bilinear=True, use_01_input=use_01_input) else:", "IS.ResampleImage() low_res_image, _ = sampler.downsample_image_to_size(I, spacing, low_res_size[2::],spline_order) return low_res_image def", "stn = STN_ND_BCXYZ(spacing,zero_boundary, use_bilinear=True, use_01_input=use_01_input) else: stn = SplineInterpolation_ND_BCXYZ(spacing, spline_order)", "a specified number of elements. 
:param nr_of_elements: number of vector", ":, :, :] = id[1] idnp[2, :, :, :] =", "== 1: # id = id.reshape(1, sz[0]) # add a", "# omt_const = torch.abs(torch.log(max_std/stds))**omt_power # omt_const = omt_const/(torch.abs(torch.log(max_std / min_std))", "classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_kaiming(m):", "FcRel(nn.Module): # fc+ relu(option) def __init__(self, in_features, out_features, active_unit='relu'): super(FcRel,", "Reorganize this package in a more meaningful way. \"\"\" from", "/ g.sum() return g else: raise ValueError('Can only compute Gaussians", "min=(np.asscalar(np.finfo('float32').min))/reduction_factor, max=(np.asscalar(np.finfo('float32').max))/reduction_factor) elif v.data.dtype == torch.DoubleTensor or type(v.data) == torch.cuda.DoubleTensor:", "= torch.matmul(Cd[n,:,:dim],Ab[n,:,:dim]) updated_param[n,:,:dim] = tm_param updated_param[n,:,dim] = torch.matmul(Cd[n,:,:dim], Ab[n,:,dim]) +Cd[n,:,dim]", "identity map (with 0 in the middle) if the sz", "everything by 0.5*spacing # # :param sz: just the spatial", "if dim==1: tensor[:,:,-1]= tensor[:,:-2]+ tensor[:,:-2]-tensor[:,:-3] if dim==2: tensor[:, :, -1,:]", "torch.cat([moving, target], dim=1) elif sched == 'width_concat': input = torch.cat((moving,", "return x class FcRel(nn.Module): # fc+ relu(option) def __init__(self, in_features,", "affine transformation. 
Formally: C(Ax+b)+d = CAx+Cb+d = x; C =", "check that this is the correct way of doing it", "= identity_map(sz[2::],spacing,dtype=dtype) return id def centered_identity_map(sz, spacing, dtype='float32'): \"\"\" Returns", "an image in BxCxXxYxZ format :param spacing: list with spacing", "torch.cuda.FloatTensor or v.data.dtype==torch.float32: return torch.clamp(v, min=(np.asscalar(np.finfo('float32').min))/reduction_factor, max=(np.asscalar(np.finfo('float32').max))/reduction_factor) elif v.data.dtype ==", "it as a numpy array on the cpu :param v:", "X Y Z :param spline_order: :param zero_boundary: :param identity_map: :return:", "def get_scalar(v): if isinstance(v, float): return v elif isinstance(v, np.ndarray)", "len(tensor.shape)-2 if dim==1: tensor[:,:,-1]= tensor[:,:-2]+ tensor[:,:-2]-tensor[:,:-3] if dim==2: tensor[:, :,", "# id = np.mgrid[0:sz[0], 0:sz[1]] # elif dim == 3:", "A =[a1,a2,a3], the parameter vector is simply [a1;a2;a3;b], i.e., all", "which to evaluate :param mu: array indicating the mean :param", "# min_std = torch.min(stds) # max_std = torch.max(stds) # omt_const", "new_spacing = resample_image(I, spacing, desiredSize, spline_order=spline_order, zero_boundary=zero_boundary, identity_map=identity_map) return resampled", "Ab.zero_() Ab[0]=1. elif dim==2: Ab.zero_() Ab[0]=1. Ab[3]=1. 
elif dim==3: Ab.zero_()", "return id def centered_identity_map(sz, spacing, dtype='float32'): \"\"\" Returns a centered", "of doing it return spacing * (np.array(sz[2::])-1) / (np.array(lowResSize[2::])-1) def", "= len(sz)-2 nrOfI = int(sz[0]) if dim == 1: id", "raise ValueError('Incompatible number of affine transforms') if dim != len(sz)-2:", "to warp, image size BxCxXxYxZ :param phi: map for the", "print( '\\n\\nWARNING: forcing last dimension to be even: fix properly", "spacing for the low-res parameterization from image spacing :param spacing:", "\"\"\" current_dim = len(A.shape) if current_dim > dim: raise ValueError('Can", "par_to_name_dict = dict() pl = [] for key in pd:", "\"\"\"Applies an affine transform to maps (for arbitrary batch size).", "= compute_normalized_gaussian(centered_id, mus, stds) tensors[n,c] = torch.from_numpy(g) def weights_init_uniform(m): classname", "= os.path.abspath(sf) ext_s = os.path.splitext(abs_s)[1] abs_t = os.path.abspath(tf) root_t,ext_t =", "ValueError('Only dimensions 1-3 are currently supported for the centered identity", "len(sz)-2 nrOfI = int(sz[0]) if dim == 1: id =", ":, :] = id[0] idnp[1, :, :, :] = id[1]", "n_batch = I.shape[0] desiredSize = desiredSize.copy() desiredSize[0] = n_batch identity_map", "more meaningful way. \"\"\" from __future__ import print_function from __future__", "id = np.mgrid[0:sz[0]] elif dim==2: id = np.mgrid[0:sz[0],0:sz[1]] elif dim==3:", "SplineInterpolation_ND_BCXYZ(spacing, spline_order) I1_warped = stn(I0, phi) return I1_warped def _compute_warped_image_multiNC_3d(I0,", "= inputs['t'].detach() self.mask = Parameter(torch.cat([torch.ones(inputs['s'].size())]*dim, 1), requires_grad = True) self.get_net_sched()", "else: # raise ValueError('Only dimensions 1-3 are currently supported for", "transform to maps (for arbitrary batch size). 
:param Ab: affine", "mask*mask*mask return mask # def compute_omt_const(stds,param,dim): # omt_power = param['forward_model']['smoother']['omt_power']", "supports dimensions 1, 2, and 3.') phiR = MyTensor(sz).zero_().type_as(phi) if", "sz[1]], dtype=dtype) idnp[0,:, :] = id[0] idnp[1,:, :] = id[1]", "!= 0: lowResSize[-1] -= 1 print( '\\n\\nWARNING: forcing last dimension", "= np.zeros([3, sz[0], sz[1], sz[2]], dtype=dtype) idnp[0, :, :, :]", "numpy array csz = np.array([nrOfI,nr_of_mg_weights]+list(csz)) weights = torch.empty(*csz) # set", "0: stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=False, use_01_input=use_01_input) elif spline_order ==", ":] = fdt.dXc(I)*lam m[:, 1, :, :, :] = fdt.dYc(I)*lam", "= np.zeros([nrOfI, 3, sz[2], sz[3], sz[4]], dtype=dtype) else: raise ValueError('Only", ":] = id[0] idnp[1,:, :] = id[1] elif dim==3: idnp", "= v.permute(0,2,3,1) if dim ==3: v = v.permute(0,2,3,4,1) return v", "std=0.1): \"\"\" space normalize for the net kernel :param tensor:", "#even # id[d] -= spacing[d]*(sz[d]//2) # else: # #odd #", "list of parameters that can be used as an input", "= 2 elif Ab.shape[1]==12: dim = 3 if dim not", "6, 7, 8, 9]: raise ValueError('Currently only orders 0 to", "= ind_pars else: # if ind_pars is not a dictionary", "momentum \"\"\" fdt = fd.FD_torch(spacing) dim = len(sz) m =", "MyTensor(sz).zero_().type_as(phi) for nrI in range(nr_of_images): phiR[nrI, ...] = apply_affine_transform_to_map(Ab[nrI, :],", "if debugging: self.net = nn.Conv2d(self.dim+2, self.dim, kernel_size, 1, padding=padding_size, bias=False)", ":] = id[2] * spacing_ratio[2] # else: # raise ValueError('Only", "kernel_size, stride, padding=padding, bias=bias) else: self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size,", "Ab[10] phiR[2, ...] = Ab[2] * phi[0, ...] 
+ Ab[5]", "[nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched == 'm_d_s': if debugging:", "the sz is odd # Otherwise shifts everything by 0.5*spacing", "dimension \"\"\" current_dim = len(A.shape) if current_dim > dim: raise", "x param. vector) :return: Updated affine parameters \"\"\" dim =", "if get_field_from_external_network: tmp = MyTensor(*(csz.tolist())).normal_(0.,1e-7) tmp.requires_grad = True else: tmp", "np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]] else: raise ValueError('Only dimensions 1-3 are currently", "idnp[2,:, :, :] = id[2] else: raise ValueError('Only dimensions 1-3", "elif v.data.dtype == torch.HalfTensor or type(v.data) == torch.cuda.HalfTensor: return torch.clamp(v,", "else None if active_unit == 'relu': self.active_unit = nn.ReLU(inplace=True) elif", "= Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) return tmp def create_local_filter_weights_parameter_multiN(sz,gaussian_std_weights, nrOfI=1,sched='w_K_w',get_preweight_from_network=False): \"\"\" Create vector", "input = organize_data(input, self.t, sched='depth_concat') elif self.net_sched == 'm_d_s_f_t': input", "spacing_ratio[1] # elif dim == 3: # idnp = np.zeros([3,", "from __future__ import print_function from __future__ import absolute_import # from", "I1_warped def compute_warped_image(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True): \"\"\"Warps image.", "if type(v.data) == torch.cuda.FloatTensor or v.data.dtype==torch.float32: return torch.clamp(v, min=(np.asscalar(np.finfo('float32').min))/reduction_factor, max=(np.asscalar(np.finfo('float32').max))/reduction_factor)", "not a dictionary assume that they come from the optimizer", "# just to make sure it is a numpy array", "((sz[2::].astype('float') - 1.) 
/ ( desiredSizeNC[2::].astype('float') - 1.)) ########################################### if", "sz is odd Otherwise shifts everything by 0.5*spacing :param sz:", "# #even # id[d] -= spacing[d]*(sz[d]//2) # else: # #odd", "elif dim == 3: # idnp = np.zeros([3, sz[0], sz[1],", "idDes = AdaptVal(torch.from_numpy(identity_map_multiN(desiredSizeNC, newspacing))) # now use this map for", "= np.zeros([1, sz[0]], dtype=dtype) idnp[0, :] = id[0] elif dim", "0.0) def weights_init_kaiming(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv')", "dim = len(tensor.shape)-2 if dim==1: tensor[:,:,-1]= tensor[:,:-2]+ tensor[:,:-2]-tensor[:,:-3] if dim==2:", "here? Needed for new identity map code') raise ValueError('Double check", "= identity_map else: idDes = AdaptVal(torch.from_numpy(identity_map_multiN(desiredSizeNC, newspacing))) # now use", "(with 0 in the middle) if the sz is odd", "spline_order, zero_boundary=False, use_01_input=True): if spline_order not in [0, 1, 2,", "= stn(I0, phi) return I1_warped def _compute_warped_image_multiNC_2d(I0, phi, spacing, spline_order,zero_boundary=False,use_01_input=True):", "nr_of_images != sz[0]: raise ValueError('Incompatible number of affine transforms') if", "2) if same_padding else 0 if not reverse: self.conv =", ":] = id[0] elif dim == 2: idnp = np.zeros([2,", "else: raise ValueError('Can only convert scalar to vector momentum in", "# # for d in range(dim): # id[d] *= spacing[d]", "sz: size of an image in BxCxXxYxZ format :param spacing:", "* np.array(factor)))).astype('int16') if lowResSize[-1] % 2 != 0: lowResSize[-1] -=", "'m_f_s_t': input = organize_data(m, self.s, sched='depth_concat') input = organize_data(input, self.t,", "view (effectively adding dimensions) Iw = compute_warped_image_multiNC(I0.view(torch.Size([1, 1] + list(I0.size()))),", "nr==12: return 3 else: raise ValueError('Only supports dimensions 1, 2,", "def _get_low_res_size_from_size(sz, factor): \"\"\"Returns the corresponding low-res 
size from a", "= len(Ab) if nr==2: return 1 elif nr==6: return 2", "spacing = np.ones(dim) centered_id = centered_identity_map(sz,spacing) g = compute_normalized_gaussian(centered_id, mus,", "self.net = nn.Sequential(*net) elif self.net_sched == 'm_f_s_t': if debugging: self.net", "any input elements are NaNs. :param x: numpy array :return:", "= id[0] idnp[1, :, :] = id[1] elif dim ==", "BxCxXxYxZ :param phi: map for the warping, size BxdimxXxYxZ :param", "if self.net_sched == 'm_only': input = m elif self.net_sched ==", "fd import torch.nn as nn import torch.nn.init as init from", "size XxYxZ \"\"\" # implements this by creating a different", "the vector momentum \"\"\" fdt = fd.FD_torch(spacing) dim = len(sz)", "bias=False) else: net = \\ [ConvBnRel(self.dim +1, 20, 5, active_unit=active_unit,", "== 3: m[:, 0, :, :, :] = fdt.dXc(I)*lam m[:,", "current_dim == dim: return A else: return A.reshape([1]*(dim-current_dim)+list(A.shape)) def get_dim_of_affine_transform(Ab):", "# new_var_list = var_list # return new_var_list # def identity_map(sz,spacing,dtype='float32'):", "are supported') if spline_order == 0: stn = STN_ND_BCXYZ(spacing, zero_boundary,", ":, :] = id[1] idnp[2,:, :, :] = id[2] else:", "2, and 3.') phiR = MyTensor(sz).zero_().type_as(phi) if dim == 1:", "of dimension dim (by adding dummy dimensions if necessary). :param", "__init__(self, in_channels, out_channels, kernel_size, stride=1, active_unit='relu', same_padding=False, bn=False, reverse=False, bias=False):", "cleaner way of handling this # this is to make", "1 entry for 1D, 2 for 2D, and 3 for", "\"\"\" Takes a torch array and returns it as a", "def space_normal(tensors, std=0.1): \"\"\" space normalize for the net kernel", "will be overwritten with identity trans. 
:return: \"\"\" sz =", "!= -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_rd_normal(m): classname", "Takes a torch array and returns it as a numpy", "init.kaiming_normal(m.weight.data, a=0, mode='fan_in') elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02)", "fdt = fd.FD_torch(spacing) dim = len(sz) m = create_ND_vector_field_variable_multiN(sz, nrOfI)", "return low_res_sz def _compute_low_res_image(I, spacing, low_res_size, spline_order): import mermaid.image_sampling as", "else \"\"\" return (x != x).any() def create_symlink_with_correct_ext(sf, tf): abs_s", "new_s, sched='depth_concat') input = organize_data(input, self.t, sched='depth_concat') return input def", "== 2: id = np.zeros([nrOfI, 2, sz[2], sz[3]], dtype=dtype) elif", "np.zeros(dim) stds = std * np.ones(dim) print('WARNING: What should the", "def get_inverse_affine_param(Ab): \"\"\"Computes inverse of affine transformation. Formally: C(Ax+b)+d =", "!= -1: init.normal(m.weight.data) elif classname.find('Linear') != -1: init.normal(m.weight.data) elif classname.find('BatchNorm2d')", "# nothing to do here, these are already the same", "+ 2, self.dim, kernel_size, 1, padding=padding_size, bias=False) else: net =", "dtype=dtype) idnp[0,:, :] = id[0] idnp[1,:, :] = id[1] elif", "low res size \"\"\" if (factor is None) or (factor", "on the cpu :param v: torch array :return: numpy array", "else: # new_var_list = var_list # return new_var_list # def", "tmp def create_local_filter_weights_parameter_multiN(sz,gaussian_std_weights, nrOfI=1,sched='w_K_w',get_preweight_from_network=False): \"\"\" Create vector field torch Parameter", "0.02) init.constant(m.bias.data, 0.0) def weights_init_kaiming(m): classname = m.__class__.__name__ # print(classname)", "init.constant(m.bias.data, 0.0) def weights_init_rd_normal(m): classname = m.__class__.__name__ # print(classname) if", ":,-2] - tensor[:, :, :,-3] if dim==3: tensor[:, :,:, -1,:,", 
"gaussian_std_weights] for g in range(nr_of_mg_weights): weights[:, g, ...] = gaussian_std_weights[g]", "phi[0, ...] + Ab[2] * phi[1, ...] + Ab[4] #", "idnp[1,:, :] = id[1] elif dim==3: idnp = np.zeros([3,sz[0], sz[1],", "implemented\") return warped_label_map def t2np(v): \"\"\" Takes a torch array", "# id = np.mgrid[0:sz[0]] # elif dim == 2: #", "1: id = id.reshape(1, sz[0]) # add a dummy first", "dim).transpose(1, 2) Cd = Cd.view(Cd.shape[0], dim+1, dim).transpose(1, 2) updated_param =", "Ab[4] * phi[1, ...] + Ab[7] * phi[2, ...] +", "transformed map \"\"\" sz = phi.size() dim = len(sz) -", "id[d] -= spacing[d]*(sz[d]//2) # else: # #odd # id[d] -=", "m = m * self.mask input = self.prepare_data(m,new_s) x= input", "'float64', ...) :return: returns the identity map \"\"\" dim =", "classname.find('Conv') != -1: init.orthogonal(m.weight.data, gain=1) elif classname.find('Linear') != -1: init.orthogonal(m.weight.data,", "tensor[:,:-2]-tensor[:,:-3] if dim==2: tensor[:, :, -1,:] = tensor[:, :,-2,:] +", "\"\"\" return Parameter(MyTensor(nr_of_elements).normal_(0., 1e-7)) def create_ND_vector_field_parameter_multiN(sz, nrOfI=1,get_field_from_external_network=False): \"\"\"Create vector field", "not is_numpy else ID.numpy(), newspacing def get_res_size_from_size(sz, factor): \"\"\" Returns", ":return: list of parameters \"\"\" pl = [] for key", "image. :param I0: image to warp, image size XxYxZ :param", "relu def __init__(self, in_channels, out_channels, kernel_size, stride=1, active_unit='relu', same_padding=False, bn=False,", "if current_dim == dim: return A else: return A.reshape([1]*(dim-current_dim)+list(A.shape)) def", "a simple conv. 
implementation, generate displacement field \"\"\" def __init__(self,", "image spacing [dx,dy,dz] :return: returns the warped image of size", "just to make sure it is a numpy array csz", "= fdt.dYc(I)*lam elif dim == 3: m[:, 0, :, :,", ".spline_interpolation import SplineInterpolation_ND_BCXYZ import os try: from .libraries.functions.nn_interpolation import get_nn_interpolation", "active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20, self.dim, 5, active_unit=active_unit, same_padding=True, bn=using_bn)] if", "padding=padding_size, bias=False) else: net = \\ [ConvBnRel(self.dim + 1, 20,", "for d in range(dim): id[d]*=spacing[d] #id[d]*=2./(sz[d]-1) #id[d]-=1. # and now", "and v.size == 1: return float(v) def checkNan(x): \"\"\"\" input", "is a numpy array csz = np.array([nr_of_images, dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0.,", "\"\"\" if (factor is None) or (factor >= 1): print('WARNING:", "is a numpy array csz = np.array([nrOfI,nr_of_mg_weights]+list(csz)) weights = torch.empty(*csz)", "* factor))).astype('int16') return low_res_sz def _compute_low_res_image(I, spacing, low_res_size, spline_order): import", "if dim == 1: id = np.mgrid[0:sz[0]] elif dim ==", "parameters \"\"\" dim = 0 if Ab.shape[1]==2: dim = 1", "map') # # min_spacing = np.min(spacing) # spacing_ratio = spacing/min_spacing", "classname.find('Linear') != -1: init.xavier_normal(m.weight.data, gain=1) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data,", ":]) def get_inverse_affine_param(Ab): \"\"\"Computes inverse of affine transformation. Formally: C(Ax+b)+d", "\"\"\" nr_of_mg_weights = len(gaussian_std_weights) csz = np.array(sz) # just to", "if get_preweight_from_network: tmp.requires_grad = True else: tmp = Parameter(tmp) return", "0.5*spacing # # :param sz: just the spatial dimensions, i.e.,", "column vector Ab to the identity transform. 
:param Ab: Affine", "Cd = Cd.view(Cd.shape[0], dim+1, dim).transpose(1, 2) updated_param = torch.zeros_like(Ab) for", "bn=using_bn)] if using_sigmoid: net += [nn.Sigmoid()] self.net = nn.Sequential(*net) def", "= desiredSize.copy() desiredSize[0] = n_batch identity_map = identity_map[:n_batch] resampled, new_spacing", ":,-2] + tensor[:, :, :,-2] - tensor[:, :, :,-3] if", "g elif dim == 2: g = np.exp(-np.power(X[0,:,:]-mu[0],2.)/(2*np.power(sig[0],2.)) - np.power(X[1,:,", "x class FcRel(nn.Module): # fc+ relu(option) def __init__(self, in_features, out_features,", "implementation, generate displacement field \"\"\" def __init__(self, inputs, dim, net_sched=None):", "be in the right format model_pars = ind_pars else: #", "for n in range(nrOfI): id[n,...] = identity_map(sz[2::],spacing,dtype=dtype) return id def", "- tensor[:, :,-3,:] tensor[:, :, :,-1] = tensor[:, :, :,-2]", "in dimensions 1-3') return m def create_ND_vector_field_variable_multiN(sz, nr_of_images=1): \"\"\" Create", "the identity map of dimension dimxXxYxZ \"\"\" dim = len(sz)", ":, :, -2] - tensor[:, :, :, -3] def get_resampled_image(I,", "phi[1, ...] + Ab[5] # a_12x+a_22y+b2 elif dim == 3:", "organize_data(m, new_s, sched='depth_concat') elif self.net_sched == 'm_f_s_t': input = organize_data(m,", "all columns stacked on top of each other. :param Ab:", "= id[0] elif dim == 2: idnp = np.zeros([2, sz[0],", "channels and add the results m = m + compute_vector_momentum_from_scalar_momentum_multiN(lam[:,", "classname.find('Conv') != -1: init.normal(m.weight.data) elif classname.find('Linear') != -1: init.normal(m.weight.data) elif", "* phi[1, ...] + Ab[7] * phi[2, ...] + Ab[10]", "B x pars (batch size x param. 
vector); will be", "SplineInterpolation_ND_BCXYZ import os try: from .libraries.functions.nn_interpolation import get_nn_interpolation except ImportError:", "== 2: return _compute_warped_image_multiNC_2d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif dim ==", "lam: scalar momentum, batchxXxYxZ :param I: image, batchXxYxZ :param sz:", "# id[d] *= spacing[d] # if sz[d]%2==0: # #even #", "= organize_data(m, self.s, sched='depth_concat') input = organize_data(input, self.t, sched='depth_concat') elif", "only add dimensions, but not remove them') if current_dim ==", "classname = m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: init.uniform(m.weight.data,", "current_dim > dim: raise ValueError('Can only add dimensions, but not", "np.array(id.astype(dtype)) # if dim == 1: # id = id.reshape(1,", "the identity map') return idnp def omt_boundary_weight_mask(img_sz,spacing,mask_range=5,mask_value=5,smoother_std =0.05): \"\"\"generate a", "nrOfI, sz[2::], spacing) return m def compute_vector_momentum_from_scalar_momentum_multiN(lam, I, nrOfI, sz,", "or (factor >= 1): print('WARNING: Could not compute low_res_size as", "+ str( type(v.data))) def lift_to_dimension(A, dim): \"\"\"Creates a view of", "spacing_ratio[0] # idnp[1, :, :] = id[1] * spacing_ratio[1] #", "dim = 0 if Ab.shape[1]==2: dim = 1 elif Ab.shape[1]==6:", "= id[1] * spacing_ratio[1] # elif dim == 3: #", "param['forward_model']['smoother']['omt_power'] # omt_weight_penalty = param['forward_model']['smoother']['omt_weight_penalty'] # min_std = torch.min(stds) #", ":param spacing: spx spy spz :param desiredSize: B C X", "2 elif nr==12: return 3 else: raise ValueError('Only supports dimensions", "raise ValueError('Currently only orders 0 to 9 are supported') if", "= torch.from_numpy(g) def weights_init_uniform(m): classname = m.__class__.__name__ # print(classname) if", "if os.path.isfile(abs_t_with_right_ext): if os.path.samefile(abs_s,abs_t_with_right_ext): # 
nothing to do here, these", "import os try: from .libraries.functions.nn_interpolation import get_nn_interpolation except ImportError: print('WARNING:", "in [0, 1, 2, 3, 4, 5, 6, 7, 8,", "a (high-res) sz. :param sz: size (high-res) :param factor: low-res", "= np.array(list(I.size())) # check that the batch size and the", "2, 3, 4, 5, 6, 7, 8, 9]: raise ValueError('Currently", "idnp # # def centered_min_normalized_identity_map(sz, spacing, dtype='float32'): # \"\"\" #", "transforms') if dim != len(sz)-2: raise ValueError('Incompatible number of affine", "3D) :param nrOfI: number of images :param nrOfC: number of", "an identity map. :param sz: just the spatial dimensions, i.e.,", "* factor))).astype('int16') else: lowResSize[2::] = (np.ceil((np.array(sz[2:]) * np.array(factor)))).astype('int16') if lowResSize[-1]", "if os.path.samefile(abs_s,abs_t_with_right_ext): # nothing to do here, these are already", "now store it in a dim+1 array if dim==1: idnp", "np.zeros([1, sz[0]], dtype=dtype) idnp[0, :] = id[0] elif dim ==", "# id = np.array(id.astype(dtype)) # if dim == 1: #", "active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20,self.dim, 5, active_unit=active_unit, same_padding=True, bn=using_bn)] if using_sigmoid:", ":param sig: array indicating the standard deviations for the different", "low_res_sz = np.array(sz) low_res_sz[2::] = (np.ceil((np.array(sz[2::]) * factor))).astype('int16') return low_res_sz", "-3, :] tensor[:, :,:, :, -1, :] = tensor[:, :,", "= os.path.abspath(tf) root_t,ext_t = os.path.splitext(abs_t) abs_t_with_right_ext = root_t + ext_s", "dimxXxYxZ \"\"\" dim = len(sz) if dim == 1: id", "spacing[spacing>min_sp]=min_sp return spacing def time_warped_function(f): def __time_warped_function(input=None): start = torch.cuda.Event(enable_timing=True)", "self.bn = nn.BatchNorm2d(out_channels, eps=0.0001, momentum=0, affine=True) if bn else None", "field of size nrOfIxnrOfCxXxYxZ \"\"\" csz = np.array(sz) # just", "the standard deviations 
for the different dimensions :return: Normalized Gaussian", "in dimensions 1-3') def _compute_warped_image_multiNC_1d(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True):", "just the spatial sizes (e.g., [5] in 1D, [5,10] in", "return mask.detach() def momentum_boundary_weight_mask(img_sz,spacing,mask_range=5,smoother_std =0.05,pow=2): \"\"\"generate a smooth weight mask", "dimensions 1 to 3') def _get_low_res_spacing_from_spacing(spacing, sz, lowResSize): \"\"\"Computes spacing", "m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: space_normal(m.weight.data) elif classname.find('Linear')", "int((kernel_size - 1) // 2) if same_padding else 0 if", "= len(sz) - 1 if dim not in [1,2,3]: raise", "\"\"\" sz = phi.size() dim = get_dim_of_affine_transform(Ab[0,:]) nr_of_images = Ab.size()[0]", "was ' + str(factor)) return np.array(sz) else: low_res_sz = np.array(sz)", "float): return v elif isinstance(v, np.ndarray) and v.size == 1:", "0.0) def weights_init_normal(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv')", ". import smoother_factory as sf from .data_wrapper import USE_CUDA import", ">>> print(compute_normalized_gaussian(X, mu, sig) \"\"\" dim = len(mu) if dim", "momentum: :math:`m=\\\\lambda\\\\nabla I`. :param lam: scalar momentum, batchxXxYxZ :param I:", "in range(dim): id[d] *= spacing[d] if sz[d]%2==0: #even id[d] -=", "np.power(sig[2], 2.))) g = g / g.sum() return g else:", "self.net_sched == 'm_d_s_f_t': if debugging: self.net = nn.Conv2d(self.dim + 2,", "spacing for the low-res parametrization from image spacing. 
:param spacing:", "be available.') def my_hasnan(x): \"\"\"Check if any input elements are", "elif self.net_sched =='m_f_s': if debugging: self.net = nn.Conv2d(self.dim+1, self.dim, kernel_size,", "tensor[:, :, -2, :] + tensor[:, :, -2, :] -", "the centered identity map') # # return idnp # #", "np.zeros([1, sz[0]], dtype=dtype) idnp[0,:] = id[0] elif dim==2: idnp =", "len(img_sz) mask_sz = [1,1]+ list(img_sz) mask = AdaptVal(torch.zeros(*mask_sz)) if dim", "= sz[1] for c in range(nrOfC): # loop over all", "/ ( desiredSizeNC[2::].astype('float') - 1.)) ########################################### if identity_map is not", "\"\"\" Create an identity map :param sz: size of an", "np.array(sz) if not isinstance(factor, list): lowResSize[2::] = (np.ceil((np.array(sz[2:]) * factor))).astype('int16')", "os.symlink(abs_s,abs_t_with_right_ext) def combine_dict(d1,d2): \"\"\"Creates a dictionary which has entries from", ":, :] = id[1] * spacing_ratio[1] # idnp[2, :, :,", "dimensions 1-3 are currently supported for the identity map') return", "def identity_map(sz,spacing,dtype='float32'): \"\"\" Returns an identity map. :param sz: just", "memory id. :param pd: parameter dictionary :return: tuple of (parameter_list,", "subsequent sums work (hence will be smaller than it could", "way. 
\"\"\" from __future__ import print_function from __future__ import absolute_import", "batchxnrCxXxYxZ (nrC corresponds to dimension) :return: returns transformed maps \"\"\"", "sampler = IS.ResampleImage() low_res_image, _ = sampler.downsample_image_to_size(I, spacing, low_res_size[2::],spline_order) return", ":param desiredSize: B C X Y Z :param spline_order: :param", "isinstance(I, torch.Tensor): I = torch.Tensor(I) is_numpy = True sz =", "if classname.find('Conv') != -1: init.kaiming_normal(m.weight.data, a=0, mode='fan_in') elif classname.find('Linear') !=", "== 'rd_normal': net.apply(weights_init_rd_normal) elif init_type == 'normal': net.apply(weights_init_normal) elif init_type", "len(gaussian_std_weights) csz = np.array(sz) # just to make sure it", "np.mgrid[0:sz[0]] elif dim == 2: id = np.mgrid[0:sz[0], 0:sz[1]] elif", "min=(np.asscalar(np.finfo('float64').min))/reduction_factor, max=(np.asscalar(np.finfo('float64').max))/reduction_factor) elif v.data.dtype == torch.HalfTensor or type(v.data) == torch.cuda.HalfTensor:", "to the identity transform. :param Ab: Affine parameter vector (will", ":param spacing: spacing of image :return: returns the vector momentum", "elements. :param nr_of_elements: number of vector elements :return: returns the", "return tmp def create_ND_scalar_field_parameter_multiNC(sz, nrOfI=1, nrOfC=1): \"\"\" Create vector field", "omt_const*omt_weight_penalty/(EV.reg_factor_in_mermaid*2) # sz = [1]+ [len(stds)] +[1]*(dim+1) # return omt_const.view(*sz)", "bn else None if active_unit == 'relu': self.active_unit = nn.ReLU(inplace=True)", "are currently supported for the identity map') # now get", "coordinates at which to evaluate :param mu: array indicating the", "store it in a dim+1 array if dim==1: idnp =", "mu[1], 2.) / (2 * np.power(sig[1], 2.))) g = g/g.sum()", "sz[0], sz[1]], dtype=dtype) idnp[0, :, :] = id[0] idnp[1, :,", "make sure it is a numpy array csz = np.array([dim]+list(csz))", "param. 
vector) :return: Updated affine parameters \"\"\" dim = 0", "Otherwise shifts everything by 0.5*spacing # # :param sz: just", "# # now get it into range [0,(sz-1)*spacing]^d # id", "# and now store it in a dim+1 array and", "find a cleaner way of handling this # this is", "net += [nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched == 'm_f_s_t':", "self.get_net_sched() #self.net.register_backward_hook(bh) def get_net_sched(self, debugging=True, using_bn=True, active_unit='relu', using_sigmoid=False , kernel_size=5):", "zero_boundary, use_bilinear=False, use_01_input=use_01_input) elif spline_order == 1: stn = STN_ND_BCXYZ(spacing,zero_boundary,", "Ab[8]=1. else: raise ValueError('Only supports dimensions 1, 2, and 3.')", "dtype=dtype) idnp[0, :, :] = id[0] idnp[1, :, :] =", ":, :dim] = tm_inv Ab_inv[n, :, dim] = - torch.matmul(tm_inv,", "file return else: os.remove(abs_t_with_right_ext) # now we can do the", "Output\") print(torch.sum(go[0].data)) return gi[0], gi[1], gi[2] class ConvBnRel(nn.Module): # conv", "raise ValueError('Can only compute Gaussians in dimensions 1-3') def _compute_warped_image_multiNC_1d(I0,", ":], phi[nrI, ...]) return phiR def compute_normalized_gaussian(X, mu, sig): \"\"\"Computes", ":] - mu[1], 2.) / (2 * np.power(sig[1], 2.))) g", "= np.exp(-np.power(X[0,:, :, :] - mu[0], 2.) 
/ (2 *", "format :param spacing: list with spacing information [sx,sy,sz] :param dtype:", "1-3 are currently supported for the centered identity map') #", "not None: idDes = identity_map else: idDes = AdaptVal(torch.from_numpy(identity_map_multiN(desiredSizeNC, newspacing)))", "bn=using_bn), ConvBnRel(20,self.dim, 5, active_unit=active_unit, same_padding=True, bn=using_bn)] if using_sigmoid: net +=", "dim==2: idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype) idnp[0,:, :] =", "given size :param sz: just the spatial sizes (e.g., [5]", "spacing, spline_order, zero_boundary=False, use_01_input=True): if spline_order not in [0, 1,", "1, 2, and 3.') Ab = Ab.view(Ab.shape[0], dim+1, dim).transpose(1, 2)", "= sf.SmootherFactory(sz, spacing).create_smoother(s_m_params) return s_m def get_warped_label_map(label_map, phi, spacing, sched='nn'):", "return lowResSize def get_res_spacing_from_spacing(spacing, sz, lowResSize): \"\"\" Computes spacing for", "a=0, mode='fan_in') elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data,", "# spacing_ratio = spacing/min_spacing # # # # now get", ":, :, :] = id[0] * spacing_ratio[0] # idnp[1, :,", "import USE_CUDA import numpy as np from . 
import finite_differences", "get it into range [0,(sz-1)*spacing]^d id = np.array(id.astype(dtype)) if dim", "tensor[:, :, :, -2] - tensor[:, :, :, -3] tensor[:,", "phi: map for the warping, size BxdimxXxYxZ :param spacing: image", "return _compute_warped_image_multiNC_1d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif dim == 2: return", "zero_boundary: :param identity_map: :return: \"\"\" if spacing is None: img_sz", "= -Cb :param Ab: B x pars (batch size x", "id = np.array( id.astype(dtype) ) if dim==1: id = id.reshape(1,sz[0])", "padding=padding, bias=bias) else: self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride, padding=padding,bias=bias)", "compute Gaussians in dimensions 1-3') def _compute_warped_image_multiNC_1d(I0, phi, spacing, spline_order,", "floating point: torch.HalfTensor, torch.cuda.HalfTensor # todo: maybe find a cleaner", "- 1.) / ( desiredSizeNC[2::].astype('float') - 1.)) ########################################### if identity_map", "* phi[0, ...] + Ab[2] * phi[1, ...] 
+ Ab[4]", "correct way of doing it return spacing * (np.array(sz[2::]) -", "currently supported for the identity map') return idnp def omt_boundary_weight_mask(img_sz,spacing,mask_range=5,mask_value=5,smoother_std", "init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def init_weights(net, init_type='normal'): print('initialization method", "return d def get_parameter_list_from_parameter_dict(pd): \"\"\"Takes a dictionary which contains key", "input = organize_data(input, self.t, sched='depth_concat') elif self.net_sched == 'm_f_s_t': input", "dtype=dtype) elif dim == 2: id = np.zeros([nrOfI, 2, sz[2],", "(np.array(sz[2::])-1) / (np.array(lowResSize[2::])-1) def _get_low_res_size_from_size(sz, factor): \"\"\"Returns the corresponding low-res", "raise ValueError('Only supports dimensions 1, 2, and 3.') def set_affine_transform_to_identity_multiN(Ab):", ":] = id[1] elif dim==3: idnp = np.zeros([3,sz[0], sz[1], sz[2]],", "CAx+Cb+d :param Ab: B x pars (batch size x param.", "but values of this size should not occur in practice", "np.zeros([3, sz[0], sz[1], sz[2]], dtype=dtype) idnp[0, :, :, :] =", "B C X Y Z :param spacing: spx spy spz", "Adaptive Net ###################################################3 def space_normal(tensors, std=0.1): \"\"\" space normalize for", "dim, not nrOfC nrOfC = sz[1] for c in range(nrOfC):", "abs_t_with_right_ext = root_t + ext_s if os.path.isfile(abs_t_with_right_ext): if os.path.samefile(abs_s,abs_t_with_right_ext): #", "-1,:, :] = tensor[:, :, -2, :] + tensor[:, :,", "of dimension dimxXxYxZ \"\"\" dim = len(sz) if dim==1: id", "None def forward(self, x): x = self.conv(x) if self.bn is", "# # # now get it into range [0,(sz-1)*spacing]^d #", "use_01_input=True): \"\"\"Warps image. :param I0: image to warp, image size", "size. 
:param sz: just the spatial sizes (e.g., [5] in", "will be smaller than it could be, # but values", "dimensions 1, 2, and 3.') Ab = Ab.view(Ab.shape[0], dim+1, dim).transpose(1,2)", "except ImportError: print('WARNING: nn_interpolation could not be imported (only supported", "warping, size BxdimxXxYxZ :param spacing: image spacing [dx,dy,dz] :return: returns", "a numpy array csz = np.array([nr_of_images, dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0., 1e-7)", "sz[0]) # add a dummy first index for d in", "2: id = np.zeros([nrOfI,2,sz[2],sz[3]],dtype=dtype) elif dim == 3: id =", "== 2: # idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype) #", "sf.SmootherFactory(sz, spacing).create_smoother(s_m_params) return s_m def get_warped_label_map(label_map, phi, spacing, sched='nn'): if", "method [%s] is not implemented' % init_type) def organize_data(moving, target,", "sched='depth_concat'): if sched == 'depth_concat': input = torch.cat([moving, target], dim=1)", "nn.Sequential(*net) elif self.net_sched == 'm_d_s_f_t': if debugging: self.net = nn.Conv2d(self.dim", "def get_single_gaussian_smoother(gaussian_std,sz,spacing): s_m_params = pars.ParameterDict() s_m_params['smoother']['type'] = 'gaussian' s_m_params['smoother']['gaussian_std'] =", "numpy as np from . import finite_differences as fd import", "dtype=dtype) elif dim == 3: id = np.zeros([nrOfI, 3, sz[2],", "inv_affine_param def update_affine_param(Ab, Cd): \"\"\"Update affine parameters. Formally: C(Ax+b)+d =", "phi[1, ...] + Ab[4] # a_11x+a_21y+b1 phiR[1, ...] = Ab[1]", "image, BxCxXxYxZ :param sz: size of image :param spacing: spacing", "d2: dictionary 2 :return: resulting dictionary \"\"\" d = d1.copy()", "track of the keys based on memory id. 
:param pd:", "returns the vector momentum \"\"\" nrOfI = sz[0] # number", "index for d in range(dim): id[d] *= spacing[d] if sz[d]%2==0:", "be add assert assert abs(torch.sum(warped_label_map.data -warped_label_map.data.round()))< 0.1, \"nn interpolation is", "Ab: Affine parameter vector (will be overwritten with the identity", "this map for resampling ID = compute_warped_image_multiNC(I, idDes, newspacing, spline_order,", "BxdimxXxYxZ :param spacing: image spacing [dx,dy,dz] :return: returns the warped", "omt_const = torch.abs(torch.log(max_std/stds))**omt_power # omt_const = omt_const/(torch.abs(torch.log(max_std / min_std)) **", "in var_list] # else: # new_var_list = var_list # return", "and self.net_input padding_size = (kernel_size-1)//2 if self.net_sched == 'm_only': if", "dim==1: idnp = np.zeros([1, sz[0]], dtype=dtype) idnp[0,:] = id[0] elif", "- tensor[:, :, :, -3] def get_resampled_image(I, spacing, desiredSize, spline_order=1,", "' + str(factor)) return np.array(sz) else: low_res_sz = np.array(sz) low_res_sz[2::]", "def individual_parameters_to_model_parameters(ind_pars): model_pars = dict() if type(ind_pars) == type(dict()): #", "3: id = np.zeros([nrOfI, 3, sz[2], sz[3], sz[4]], dtype=dtype) else:", "MyTensor(sz).zero_().type_as(phi) if dim == 1: phiR = phi * Ab[0]", "= omt_const*omt_weight_penalty/(EV.reg_factor_in_mermaid*2) # sz = [1]+ [len(stds)] +[1]*(dim+1) # return", "BxCxXxYxZ format) :param spacing: array describing the spatial spacing :param", "of the form y=Ax+b stored in a column vector. 
For", "desiredSizeNC[2::].astype('float') - 1.)) ########################################### if identity_map is not None: idDes", "Parameter(torch.cat([torch.ones(inputs['s'].size())]*dim, 1), requires_grad = True) self.get_net_sched() #self.net.register_backward_hook(bh) def get_net_sched(self, debugging=True,", "self.net and self.net_input padding_size = (kernel_size-1)//2 if self.net_sched == 'm_only':", "Affine parameter vector (will be overwritten with the identity transform)", "= IS.ResampleImage() low_res_image, _ = sampler.downsample_image_to_size(I, spacing, low_res_size[2::],spline_order) return low_res_image", "sure it is a numpy array csz = np.array([nrOfI, dim]+list(csz))", "nr_of_mg_weights = len(gaussian_std_weights) csz = np.array(sz) # just to make", "= [var*spacing_ratio_t if var is not None else None for", "[5,10] in 2D, [5,10,10] in 3D) :return: returns vector field", "# omt_power = param['forward_model']['smoother']['omt_power'] # omt_weight_penalty = param['forward_model']['smoother']['omt_weight_penalty'] # min_std", "tm_param = torch.matmul(Cd[n,:,:dim],Ab[n,:,:dim]) updated_param[n,:,:dim] = tm_param updated_param[n,:,dim] = torch.matmul(Cd[n,:,:dim], Ab[n,:,dim])", "torch array :return: numpy array \"\"\" dim = len(v.shape)-2 if", "dim = I0.dim()-2 if dim == 1: return _compute_warped_image_multiNC_1d(I0, phi,", "add a dummy first index # # for d in", "ValueError('Only supports dimensions 1, 2, and 3.') def set_affine_transform_to_identity(Ab): \"\"\"Sets", "m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: init.kaiming_normal(m.weight.data, a=0, mode='fan_in')", "# # # and now store it in a dim+1", "max=(np.asscalar(np.finfo('float64').max))/reduction_factor) elif v.data.dtype == torch.HalfTensor or type(v.data) == torch.cuda.HalfTensor: return", "orders 0 to 9 are supported') if spline_order == 0:", "sz[0]) # add a dummy first index # # for", "\"\"\" sz = Ab.size() nr_of_images = sz[0] for nrI in", "\"\"\" return 
(v.detach()).cpu().numpy() def cxyz_to_xyzc( v ): \"\"\" Takes a", "nrOfI: number of images :return: returns vector field of size", "= np.zeros(dim) stds = std * np.ones(dim) print('WARNING: What should", "_compute_warped_image_multiNC_2d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif dim == 3: return _compute_warped_image_multiNC_3d(I0,", "dtype: numpy data-type ('float32', 'float64', ...) :return: returns the identity", "elif sched == 'list_concat': input = torch.cat((moving.unsqueeze(0),target.unsqueeze(0)),dim=0) elif sched ==", "...) :return: returns the identity map \"\"\" dim = len(sz)-2", "image dim, not nrOfC nrOfC = sz[1] for c in", "_compute_warped_image_multiNC_2d(I0, phi, spacing, spline_order,zero_boundary=False,use_01_input=True): if spline_order not in [0, 1,", "1] + list(I0.size()))), phi.view(torch.Size([1] + list(phi.size()))), spacing, spline_order, zero_boundary, use_01_input)", "= id[1] elif dim == 3: idnp = np.zeros([3, sz[0],", "!= sz[0]: raise ValueError('Incompatible number of affine transforms') if dim", "= param['forward_model']['smoother']['omt_weight_penalty'] # min_std = torch.min(stds) # max_std = torch.max(stds)", "= organize_data(m, new_s, sched='depth_concat') elif self.net_sched == 'm_f_s_t': input =", "as factor was ' + str(factor)) return sz else: lowResSize", "########################################## Adaptive Net ###################################################3 def space_normal(tensors, std=0.1): \"\"\" space normalize", "== 'xavier': net.apply(weights_init_xavier) elif init_type == 'kaiming': net.apply(weights_init_kaiming) elif init_type", "' + str(factor)) return sz else: lowResSize = np.array(sz) if", "import finite_differences as fd import torch.nn as nn import torch.nn.init", "# raise ValueError('Only dimensions 1-3 are currently supported for the", "array indicating the standard deviations for the different dimensions :return:", "% 2 != 0: lowResSize[-1] -= 1 print( '\\n\\nWARNING: 
forcing", "sm = get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask = sm.smooth(mask) if pow ==2: mask", "if sz[d]%2==0: #even id[d] -= spacing[d]*(sz[d]//2) else: #odd id[d] -=", "fix for symmetric training if I.shape[0] != identity_map.shape[0]: n_batch =", "np.min(spacing) # spacing_ratio =min_spacing/spacing # dim = spacing.size # spacing_ratio_t", "idDes = identity_map else: idDes = AdaptVal(torch.from_numpy(identity_map_multiN(desiredSizeNC, newspacing))) # now", "net kernel :param tensor: :param mean: :param std: :return: \"\"\"", "self.t, sched='depth_concat') elif self.net_sched == 'm_d_s_f_t': input = organize_data(m, new_s,", "spline_order == 1: stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=True, use_01_input=use_01_input) else:", "= torch.empty(*csz) # set the default if sched =='w_K_w': gaussian_std_weights", "int(sz[0]) if dim == 1: id = np.zeros([nrOfI,1,sz[2]],dtype=dtype) elif dim", "= len(sz) - 2 nrOfI = sz[0] if dim ==", "get_res_spacing_from_spacing(spacing, sz, lowResSize): \"\"\" Computes spacing for the low-res parameterization", "list(I0.size()))), phi.view(torch.Size([1] + list(phi.size()))), spacing, spline_order, zero_boundary, use_01_input) return Iw.view(I0.size())", "in pd: pl.append(pd[key]) return pl def get_parameter_list_and_par_to_name_dict_from_parameter_dict(pd): \"\"\"Same as get_parameter_list_from_parameter_dict;", "m def compute_vector_momentum_from_scalar_momentum_multiN(lam, I, nrOfI, sz, spacing): \"\"\"Computes the vector", "vectors B x pars (batch size x param. 
vector); will", "init.constant(m.bias.data, 0.0) def weights_init_xavier(m): classname = m.__class__.__name__ # print(classname) if", "dim = 1 elif Ab.shape[1]==6: dim = 2 elif Ab.shape[1]==12:", "\"\"\"Creates a dictionary which has entries from both of them.", "= std * np.ones(dim) print('WARNING: What should the spacing be", "-1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_orthogonal(m): classname =", "vector); will be overwritten with identity trans. :return: \"\"\" sz", "idnp[1, :, :, :] = id[1] idnp[2, :, :, :]", "the second dimension here is image dim, not nrOfC nrOfC", "spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif dim == 2: return _compute_warped_image_multiNC_2d(I0, phi, spacing,", "- 1) ########################################## Adaptive Net ###################################################3 def space_normal(tensors, std=0.1): \"\"\"", "1e-7)) def create_ND_vector_field_parameter_multiN(sz, nrOfI=1,get_field_from_external_network=False): \"\"\"Create vector field torch Parameter of", "if not isinstance(I, torch.Tensor): I = torch.Tensor(I) is_numpy = True", "(2 * np.power(sig[1], 2.))) g = g/g.sum() return g elif", "currently supported for the identity map') # # min_spacing =", "g = np.exp(-np.power(X[0,:,:]-mu[0],2.)/(2*np.power(sig[0],2.)) - np.power(X[1,:, :] - mu[1], 2.) /", "spline_order, zero_boundary=False, use_01_input=True): \"\"\"Warps image. :param I0: image to warp,", "# if dim == 1: # id = id.reshape(1, sz[0])", "def bh(m,gi,go): print(\"Grad Input\") print((torch.sum(gi[0].data), torch.sum(gi[1].data))) print(\"Grad Output\") print(torch.sum(go[0].data)) return", "of image :param spacing: spacing of image :return: returns the", "be used as an input to an optimizer. 
:param pd:", "array if dim==1: idnp = np.zeros([1, sz[0]], dtype=dtype) idnp[0,:] =", "vector is simply [a1;a2;a3;b], i.e., all columns stacked on top", "= nn.Conv2d(2, 2, kernel_size, 1, padding=padding_size, bias=False,groups=2) else: net =", "kernel_size, 1, padding=padding_size, bias=False) else: net = \\ [ConvBnRel(self.dim +", "else: net = \\ [ConvBnRel(self.dim + 2, 20, 5, active_unit=active_unit,", "== 3: id = np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]] else: raise ValueError('Only", "affine parameters \"\"\" dim =0 if Ab.shape[1] == 2: dim", "this code') spacing = np.ones(dim) centered_id = centered_identity_map(sz,spacing) g =", "= [1]+ [len(stds)] +[1]*(dim+1) # return omt_const.view(*sz) def get_single_gaussian_smoother(gaussian_std,sz,spacing): s_m_params", "len(sz) - 1 if dim not in [1,2,3]: raise ValueError('Only", "2D, [5,10,10] in 3D) :param nrOfI: number of images :param", "min_spacing = np.min(spacing) # spacing_ratio =min_spacing/spacing # dim = spacing.size", "range(nr_of_images): phiR[nrI, ...] = apply_affine_transform_to_map(Ab[nrI, :], phi[nrI, ...]) return phiR", "tensors[n][c].size() mus = np.zeros(dim) stds = std * np.ones(dim) print('WARNING:", "Ab[11] else: raise ValueError('Only supports dimensions 1, 2, and 3.')", "for the identity map') for n in range(nrOfI): id[n,...] =", "(x != x).any() def create_symlink_with_correct_ext(sf, tf): abs_s = os.path.abspath(sf) ext_s", "os.path.abspath(tf) root_t,ext_t = os.path.splitext(abs_t) abs_t_with_right_ext = root_t + ext_s if", "ValueError('Unknown data type: ' + str( type(v.data))) def lift_to_dimension(A, dim):", "tensor[:, :,:, :, -1, :] = tensor[:, :, :, -2]", "return v def get_scalar(v): if isinstance(v, float): return v elif", "relu(option) def __init__(self, in_features, out_features, active_unit='relu'): super(FcRel, self).__init__() self.fc =", "utility functions. .. 
todo:: Reorganize this package in a more", "low_res_size as factor was ' + str(factor)) return np.array(sz) else:", "array and returns it as a numpy array on the", "[1, 2, 3]: raise ValueError('Only supports dimensions 1, 2, and", "-1: init.uniform(m.weight.data, 0.038, 0.042) elif classname.find('Linear') != -1: init.uniform(m.weight.data, 0.0,", "== 3: return _compute_warped_image_multiNC_3d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) else: raise ValueError('Images", "s_m_params = pars.ParameterDict() s_m_params['smoother']['type'] = 'gaussian' s_m_params['smoother']['gaussian_std'] = gaussian_std s_m", "warp, image size BxCxXxYxZ :param phi: map for the warping,", "not None: # todo will remove, currently fix for symmetric", "m elif self.net_sched == 'm_f_s': input = organize_data(m,self.s,sched='depth_concat') elif self.net_sched", ":param I: B C X Y Z :param spacing: spx", "id[0] elif dim == 2: idnp = np.zeros([2, sz[0], sz[1]],", "# idnp[0, :, :] = id[0] * spacing_ratio[0] # idnp[1,", "phi[2, ...] + Ab[11] else: raise ValueError('Only supports dimensions 1,", "net = \\ [ConvBnRel(self.dim, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20,self.dim,", "and 3.') Ab = Ab.view(Ab.shape[0], dim+1, dim).transpose(1, 2) Cd =", "else: #odd id[d] -= spacing[d]*((sz[d]+1)//2) # and now store it", "gain=1) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0)", "# return the self.net and self.net_input padding_size = (kernel_size-1)//2 if", "id.reshape(1, sz[0]) # add a dummy first index for d", "return pl, par_to_name_dict def remove_infs_from_variable(v): # 32 - bit floating", "phiR = MyTensor(sz).zero_().type_as(phi) if dim == 1: phiR = phi", "def compute_warped_image_multiNC(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True): \"\"\"Warps image. 
:param", "image, the new spacing after downsampling \"\"\" desiredSize = desiredSize[2:]", "is not implemented\") return warped_label_map def t2np(v): \"\"\" Takes a", "0 in the middle) if the sz is odd Otherwise", "(np.array(img_sz) - 1) if identity_map is not None: # todo", "ID if not is_numpy else ID.numpy(), newspacing def get_res_size_from_size(sz, factor):", "= gaussian_std s_m = sf.SmootherFactory(sz, spacing).create_smoother(s_m_params) return s_m def get_warped_label_map(label_map,", "def _compute_warped_image_multiNC_2d(I0, phi, spacing, spline_order,zero_boundary=False,use_01_input=True): if spline_order not in [0,", "classname = m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: init.xavier_normal(m.weight.data,", "'width_concat': input = torch.cat((moving, target), dim=3) elif sched == 'list_concat':", "isinstance(v, float): return v elif isinstance(v, np.ndarray) and v.size ==", "elif dim == 3: id = np.zeros([nrOfI, 3, sz[2], sz[3],", "def prepare_data(self, m, new_s): input=None if self.net_sched == 'm_only': input", "ext_s if os.path.isfile(abs_t_with_right_ext): if os.path.samefile(abs_s,abs_t_with_right_ext): # nothing to do here,", "np.array([nrOfI, dim]+list(csz)) if get_field_from_external_network: tmp = MyTensor(*(csz.tolist())).normal_(0.,1e-7) tmp.requires_grad = True", "phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif dim == 2: return _compute_warped_image_multiNC_2d(I0, phi,", "returns vector field of size nrOfIxdimxXxYxZ \"\"\" dim = len(sz)", "= torch.inverse(Ab[n, :, :dim]) Ab_inv[n, :, :dim] = tm_inv Ab_inv[n,", "elif dim == 2: phiR[0, ...] 
= Ab[0] * phi[0,", "net_sched=None): # settings should include [using_bias, using bn, using elu]", "the identity map of dimension dimxXxYxZ # \"\"\" # dim", "as fd import torch.nn as nn import torch.nn.init as init", "newspacing))) # now use this map for resampling ID =", "inputs['t'].detach() self.mask = Parameter(torch.cat([torch.ones(inputs['s'].size())]*dim, 1), requires_grad = True) self.get_net_sched() #self.net.register_backward_hook(bh)", "'Some functionality may not be available.') def my_hasnan(x): \"\"\"Check if", "id[0] elif dim==2: idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype) idnp[0,:,", ":return: returns view of A of appropriate dimension \"\"\" current_dim", "# # # # now get it into range [0,(sz-1)*spacing]^d", "* phi[2, ...] + Ab[9] phiR[1, ...] = Ab[1] *", "spacing, sched='nn'): if sched == 'nn': warped_label_map = compute_warped_image_multiNC(label_map, phi,", ":math:`m=\\\\lambda\\\\nabla I`. :param lam: scalar momentum, BxCxXxYxZ :param I: image,", "dim==3: idnp = np.zeros([3,sz[0], sz[1], sz[2]], dtype=dtype) idnp[0,:, :, :]", "\"\"\" dim = len(sz) - 2 nrOfI = sz[0] if", "dim = len(sz) m = create_ND_vector_field_variable_multiN(sz, nrOfI) if dim ==", "0:sz[1]] elif dim == 3: id = np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]]", "== 3: # idnp = np.zeros([3, sz[0], sz[1], sz[2]], dtype=dtype)", "id = np.mgrid[0:sz[0],0:sz[1]] elif dim==3: id = np.mgrid[0:sz[0],0:sz[1],0:sz[2]] else: raise", "torch.clamp(v, min=(np.asscalar(np.finfo('float32').min))/reduction_factor, max=(np.asscalar(np.finfo('float32').max))/reduction_factor) elif v.data.dtype == torch.DoubleTensor or type(v.data) ==", "raise ValueError('Incompatible number of affine transforms') phiR = MyTensor(sz).zero_().type_as(phi) for", "1 to 3') def _get_low_res_spacing_from_spacing(spacing, sz, lowResSize): \"\"\"Computes spacing for", "range import torch from torch.nn.parameter import Parameter from torch.autograd import", ":param nrOfI: number of images :param nrOfC: number of channels", ":] = 
tensor[:, :, :, -2] + tensor[:, :, :,", "code') raise ValueError('Double check the spacing here before running this", "= organize_data(input, self.t, sched='depth_concat') return input def forward(self, m,new_s=None): m", "var_list # return new_var_list # def identity_map(sz,spacing,dtype='float32'): \"\"\" Returns an", "0, :] = fdt.dXc(I)*lam elif dim == 2: m[:, 0,", "inputs should be a dictionary could contain ['s'],['t'] super(AdpSmoother, self).__init__()", "= stn(I0, phi) return I1_warped def _compute_warped_image_multiNC_3d(I0, phi, spacing, spline_order,zero_boundary=False,use_01_input=True):", "if dim not in [1,2,3]: raise ValueError('Only supports dimensions 1,", "if type(ind_pars) == type(dict()): # should already be in the", "return (v.detach()).cpu().numpy() def cxyz_to_xyzc( v ): \"\"\" Takes a torch", ":, :dim]) Ab_inv[n, :, :dim] = tm_inv Ab_inv[n, :, dim]", "tensor[:, :,-2,:] + tensor[:, :,-2,:] - tensor[:, :,-3,:] tensor[:, :,", "= m.__class__.__name__ print(classname) if classname.find('Conv') != -1: init.orthogonal(m.weight.data, gain=1) elif", "= np.array([nrOfI, nrOfC] + list(desiredSize)) newspacing = spacing * ((sz[2::].astype('float')", "fdt.dYc(I)*lam elif dim == 3: m[:, 0, :, :, :]", "_get_low_res_spacing_from_spacing(spacing, sz, lowResSize): \"\"\"Computes spacing for the low-res parametrization from", "a=0, mode='fan_in') elif classname.find('Linear') != -1: init.kaiming_normal(m.weight.data, a=0, mode='fan_in') elif", "def identity_map_multiN(sz,spacing,dtype='float32'): \"\"\" Create an identity map :param sz: size", "== 'depth_concat': input = torch.cat([moving, target], dim=1) elif sched ==", "import absolute_import # from builtins import str # from builtins", "of (parameter_list, name_dictionary) \"\"\" par_to_name_dict = dict() pl = []", "dtype=dtype) else: raise ValueError('Only dimensions 1-3 are currently supported for", "= np.min(spacing) spacing[spacing>min_sp]=min_sp return spacing def time_warped_function(f): def 
__time_warped_function(input=None): start", "(2 * np.power(sig[2], 2.))) g = g / g.sum() return", "(e.g., [5] in 1D, [5,10] in 2D, [5,10,10] in 3D)", "# Returns a centered identity map (with 0 in the", "field torch Parameter of given size :param sz: just the", ":param d1: dictionary 1 :param d2: dictionary 2 :return: resulting", "range(nrOfC): # loop over all the channels and add the", "= AdaptVal(torch.ones(*mask_sz))*mask_value if dim ==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1 elif dim==3: mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range ]=1", "could contain ['s'],['t'] super(AdpSmoother, self).__init__() self.dim = dim self.net_sched =", "1), requires_grad = True) self.get_net_sched() #self.net.register_backward_hook(bh) def get_net_sched(self, debugging=True, using_bn=True,", "be imported (only supported in CUDA at the moment). '", "returns a tuple: the downsampled image, the new spacing after", "return spacing * (np.array(sz[2::])-1) / (np.array(lowResSize[2::])-1) def _get_low_res_size_from_size(sz, factor): \"\"\"Returns", "debugging=True, using_bn=True, active_unit='relu', using_sigmoid=False , kernel_size=5): # return the self.net", "using_bn=True, active_unit='relu', using_sigmoid=False , kernel_size=5): # return the self.net and", "-1: init.kaiming_normal(m.weight.data, a=0, mode='fan_in') elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0,", "from .libraries.functions.nn_interpolation import get_nn_interpolation except ImportError: print('WARNING: nn_interpolation could not", "print(classname) if classname.find('Conv') != -1: init.uniform(m.weight.data, 0.038, 0.042) elif classname.find('Linear')", "spacing :param desiredSize: array for the desired size (excluding B", "if not is_numpy else ID.numpy(), newspacing def get_res_size_from_size(sz, factor): \"\"\"", "sz[0] # number of images m = create_ND_vector_field_variable_multiN(sz[2::], nrOfI) #", "only convert scalar to vector 
momentum in dimensions 1-3') return", "idnp = np.zeros([1, sz[0]], dtype=dtype) idnp[0,:] = id[0] elif dim==2:", "is not precise\" else: raise ValueError(\" the label warping method", "id[2] * spacing_ratio[2] # else: # raise ValueError('Only dimensions 1-3", "min_std)) ** omt_power) # omt_const = omt_const*omt_weight_penalty/(EV.reg_factor_in_mermaid*2) # sz =", "resampling ID = compute_warped_image_multiNC(I, idDes, newspacing, spline_order, zero_boundary) return ID", "np.power(sig[1], 2.))) g = g/g.sum() return g elif dim ==", "pairs for model parameters and converts it into a list", "('float32', 'float64', ...) # :return: returns the identity map of", "numpy array csz = np.array([dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0.,1e-7) def create_vector_parameter(nr_of_elements): \"\"\"Creates", "0.0) def weights_init_rd_normal(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv')", "= fdt.dZc(I)*lam else: raise ValueError('Can only convert scalar to vector", "3: # idnp = np.zeros([3, sz[0], sz[1], sz[2]], dtype=dtype) #", "# # def centered_min_normalized_identity_map(sz, spacing, dtype='float32'): # \"\"\" # Returns", "make sure it is a numpy array csz = np.array([nrOfI,nr_of_mg_weights]+list(csz))", "size (high-res) :param factor: low-res factor (needs to be <1)", "supported for the identity map') for n in range(nrOfI): id[n,", "map') return idnp def omt_boundary_weight_mask(img_sz,spacing,mask_range=5,mask_value=5,smoother_std =0.05): \"\"\"generate a smooth weight", "end.record() # Waits for everything to finish running torch.cuda.synchronize() print(start.elapsed_time(end))", "of dimension dimxXxYxZ # \"\"\" # dim = len(sz) #", "conv. implementation, generate displacement field \"\"\" def __init__(self, inputs, dim,", ":, :, -1] = tensor[:, :, :, -2] + tensor[:,", "if dim==1: Ab.zero_() Ab[0]=1. elif dim==2: Ab.zero_() Ab[0]=1. Ab[3]=1. 
elif", "torch.cuda.synchronize() print(start.elapsed_time(end)) return output return __time_warped_function def interoplate_boundary_right(tensor): dim =", "active_unit == 'elu': self.active_unit = nn.ELU(inplace=True) else: self.active_unit = None", "dim == 3: return _compute_warped_image_multiNC_3d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) else: raise", "str(factor)) return np.array(sz) else: low_res_sz = np.array(sz) low_res_sz[2::] = (np.ceil((np.array(sz[2::])", "m[:, 0, :, :, :] = fdt.dXc(I)*lam m[:, 1, :,", "a dim+1 array and rescale by the ratio # if", "var_list] # else: # new_var_list = var_list # return new_var_list", "spacing[d]*((sz[d]+1)//2) # # # and now store it in a", "== 'm_only': if debugging: self.net = nn.Conv2d(2, 2, kernel_size, 1,", "around 0) :param sz: size of an image in BxCxXxYxZ", "vector field torch Parameter of given size :param sz: just", "time_warped_function(f): def __time_warped_function(input=None): start = torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True) start.record()", "field of size dimxXxYxZ \"\"\" dim = len(sz) csz =", "-1] = tensor[:, :, :, -2] + tensor[:, :, :,", "MyTensor(*(csz.tolist())).normal_(0., 1e-7) def create_ND_vector_field_variable(sz): \"\"\"Create vector field torch Variable of", "print(classname) if classname.find('Conv') != -1: init.orthogonal(m.weight.data, gain=1) elif classname.find('Linear') !=", "phiR[2, ...] = Ab[2] * phi[0, ...] 
+ Ab[5] *", "dim == 2: return _compute_warped_image_multiNC_2d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif dim", "out_channels, kernel_size, stride=1, active_unit='relu', same_padding=False, bn=False, reverse=False, bias=False): super(ConvBnRel, self).__init__()", "idnp = np.zeros([3, sz[0], sz[1], sz[2]], dtype=dtype) idnp[0, :, :,", "0.02) init.constant(m.bias.data, 0.0) def weights_init_orthogonal(m): classname = m.__class__.__name__ print(classname) if", "it in a dim+1 array and rescale by the ratio", "def apply_affine_transform_to_map(Ab,phi): \"\"\"Applies an affine transform to a map. :param", "it in a dim+1 array if dim==1: idnp = np.zeros([1,", "in the middle) if the sz is odd # Otherwise", "dim).transpose(1, 2) updated_param = torch.zeros_like(Ab) for n in range(Ab.shape[0]): tm_param", "elif init_type == 'xavier': net.apply(weights_init_xavier) elif init_type == 'kaiming': net.apply(weights_init_kaiming)", "nn.ReLU(inplace=True) elif active_unit == 'elu': self.active_unit = nn.ELU(inplace=True) else: self.active_unit", "on top of each other. :param Ab: parameter vector :return:", "np.power(sig[1], 2.)) -np.power(X[2,:, :, :] - mu[2], 2.) / (2", "...) # :return: returns the identity map of dimension dimxXxYxZ", "Resample an image to a given desired size :param I:", "= [] for key in pd: pl.append(pd[key]) return pl def", "Ab[9] phiR[1, ...] = Ab[1] * phi[0, ...] + Ab[4]", "def my_hasnan(x): \"\"\"Check if any input elements are NaNs. :param", "spline_order not in [0, 1, 2, 3, 4, 5, 6,", "numpy array \"\"\" return (v.detach()).cpu().numpy() def cxyz_to_xyzc( v ): \"\"\"", "*= spacing[d] # if sz[d]%2==0: # #even # id[d] -=", "spacing def time_warped_function(f): def __time_warped_function(input=None): start = torch.cuda.Event(enable_timing=True) end =", "Parameter of given size. :param sz: just the spatial sizes", "range(nrOfI): id[n,...] 
= identity_map(sz[2::],spacing,dtype=dtype) return id def centered_identity_map(sz, spacing, dtype='float32'):", "spacing, desiredSize, spline_order=spline_order, zero_boundary=zero_boundary, identity_map=identity_map) return resampled def resample_image(I, spacing,", "# idnp[1, :, :] = id[1] * spacing_ratio[1] # elif", "momentum in dimensions 1-3') return m def create_ND_vector_field_variable_multiN(sz, nr_of_images=1): \"\"\"", "= Ab[1] * phi[0, ...] + Ab[4] * phi[1, ...]", "# idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype) # idnp[0, :,", "if dim==1: idnp = np.zeros([1, sz[0]], dtype=dtype) idnp[0,:] = id[0]", "return else: os.remove(abs_t_with_right_ext) # now we can do the symlink", "# omt_const = omt_const*omt_weight_penalty/(EV.reg_factor_in_mermaid*2) # sz = [1]+ [len(stds)] +[1]*(dim+1)", "(batch size x param. vector); will be overwritten with identity", "3: return _compute_warped_image_multiNC_3d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) else: raise ValueError('Images can", "tensor[:, :,-2,:] - tensor[:, :,-3,:] tensor[:, :, :,-1] = tensor[:,", "pl.append(pd[key]) return pl def get_parameter_list_and_par_to_name_dict_from_parameter_dict(pd): \"\"\"Same as get_parameter_list_from_parameter_dict; but also", "describing the spatial spacing :param desiredSize: array for the desired", "identity map (shifted so it is centered around 0) :param", "* phi[2, ...] + Ab[11] else: raise ValueError('Only supports dimensions", ":] = id[0] idnp[1,:, :, :] = id[1] idnp[2,:, :,", "net = \\ [ConvBnRel(self.dim +1, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn),", "to vector momentum in dimensions 1-3') return m def create_ND_vector_field_variable_multiN(sz,", "= g/g.sum() return g elif dim == 3: g =", ":, :] = id[2] * spacing_ratio[2] # else: # raise", "(factor is None): print('WARNING: Could not compute low_res_size as factor", "parameters with a specified number of elements. 
:param nr_of_elements: number", "= v.size() reduction_factor = np.prod(np.array(sz)) condition = True if type(v.data)", "tf): abs_s = os.path.abspath(sf) ext_s = os.path.splitext(abs_s)[1] abs_t = os.path.abspath(tf)", "map (with 0 in the middle) if the sz is", "= id[0] * spacing_ratio[0] # idnp[1, :, :, :] =", "-1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_rd_normal(m): classname =", "-1: init.kaiming_normal(m.weight.data, a=0, mode='fan_in') elif classname.find('Linear') != -1: init.kaiming_normal(m.weight.data, a=0,", "add dimensions, but not remove them') if current_dim == dim:", "low re parameterization :return: returns spacing of low res parameterization", "BxCxXxYxZ \"\"\" dim = I0.dim()-2 if dim == 1: return", "print(start.elapsed_time(end)) return output return __time_warped_function def interoplate_boundary_right(tensor): dim = len(tensor.shape)-2", "else: net = \\ [ConvBnRel(self.dim + 1, 20, 5, active_unit=active_unit,", "a numpy array on the cpu :param v: torch array", "min_spacing = np.min(spacing) # spacing_ratio =spacing/min_spacing # dim = spacing.size", ":, :,-2] + tensor[:, :, :,-2] - tensor[:, :, :,-3]", "centered_identity_map_multiN(sz, spacing, dtype='float32'): \"\"\" Create a centered identity map (shifted", "in 1D, [5,10] in 2D, [5,10,10] in 3D) :return: returns", "np.zeros([2, sz[0], sz[1]], dtype=dtype) # idnp[0, :, :] = id[0]", "_ = sampler.downsample_image_to_size(I, spacing, low_res_size[2::],spline_order) return low_res_image def individual_parameters_to_model_parameters(ind_pars): model_pars", "which has entries from both of them. 
:param d1: dictionary", "spacing: spacing of image :return: returns the vector momentum \"\"\"", "is odd # Otherwise shifts everything by 0.5*spacing # #", "identity_map[:n_batch] resampled, new_spacing = resample_image(I, spacing, desiredSize, spline_order=spline_order, zero_boundary=zero_boundary, identity_map=identity_map)", "else ID.numpy(), newspacing def get_res_size_from_size(sz, factor): \"\"\" Returns the corresponding", "lowResSize def get_res_spacing_from_spacing(spacing, sz, lowResSize): \"\"\" Computes spacing for the", ":] - tensor[:, :, -3, :] tensor[:, :,:, :, -1,", "normalize for the net kernel :param tensor: :param mean: :param", "= np.ones(dim) centered_id = centered_identity_map(sz,spacing) g = compute_normalized_gaussian(centered_id, mus, stds)", "def lift_to_dimension(A, dim): \"\"\"Creates a view of A of dimension", "number of affine transforms') if dim != len(sz)-2: raise ValueError('Incompatible", "gamma + beta #When affine=False the output of BatchNorm is", "2 for 2D, and 3 for 3D) :return: returns a", "1D, [5,10] in 2D, [5,10,10] in 3D) :return: returns vector", "this is to make sure that subsequent sums work (hence", "a dictionary with keys 'name' and 'model_params' for par in", "for the warping, size BxdimxXxYxZ :param spacing: image spacing [dx,dy,dz]", "first index # # for d in range(dim): # id[d]", "def time_warped_function(f): def __time_warped_function(input=None): start = torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True)", ":, :, -3] def get_resampled_image(I, spacing, desiredSize, spline_order=1, zero_boundary=False, identity_map=None):", "= id.reshape(1,sz[0]) # add a dummy first index for d", "Z :param spacing: spx spy spz :param desiredSize: B C", "currently supported for the centered identity map') # # return", "format model_pars = ind_pars else: # if ind_pars is not", "sz[4]], dtype=dtype) else: raise ValueError('Only dimensions 1-3 are currently supported", "# conv + bn (optional) + relu def 
__init__(self, in_channels,", "use_bilinear=False, use_01_input=use_01_input) elif spline_order == 1: stn = STN_ND_BCXYZ(spacing,zero_boundary, use_bilinear=True,", ":param lowResSize: size of low re parameterization :return: returns spacing", "' + str( type(v.data))) def lift_to_dimension(A, dim): \"\"\"Creates a view", "fc+ relu(option) def __init__(self, in_features, out_features, active_unit='relu'): super(FcRel, self).__init__() self.fc", "dummy first index for d in range(dim): id[d] *= spacing[d]", "sz = np.array(list(I.size())) # check that the batch size and", "return 3 else: raise ValueError('Only supports dimensions 1, 2, and", "for c in range(nrOfC): # loop over all the channels", "dim == 2: # id = np.mgrid[0:sz[0], 0:sz[1]] # elif", "= 'gaussian' s_m_params['smoother']['gaussian_std'] = gaussian_std s_m = sf.SmootherFactory(sz, spacing).create_smoother(s_m_params) return", "use_bilinear=True, use_01_input=use_01_input) else: stn = SplineInterpolation_ND_BCXYZ(spacing, spline_order) I1_warped = stn(I0,", "dummy first index # # for d in range(dim): #", "-1, :] = tensor[:, :, :, -2] + tensor[:, :,", "a_12x+a_22y+b2 elif dim == 3: phiR[0, ...] = Ab[0] *", "self.active_unit is not None: x = self.active_unit(x) return x class", "d1.copy() d.update(d2) return d def get_parameter_list_from_parameter_dict(pd): \"\"\"Takes a dictionary which", "3.') Ab = Ab.view(Ab.shape[0], dim+1, dim).transpose(1,2) Ab_inv = torch.zeros_like(Ab) for", "map \"\"\" sz = phi.size() dim = len(sz) - 1", "dim != len(sz)-2: raise ValueError('Incompatible number of affine transforms') phiR", "phi, spacing, spline_order, zero_boundary=False, use_01_input=True): if spline_order not in [0,", "None for var in var_list] # else: # new_var_list =", "for 3D) :return: returns a tuple: the downsampled image, the", "init_type == 'rd_normal': net.apply(weights_init_rd_normal) elif init_type == 'normal': net.apply(weights_init_normal) elif", "scalar momentum: :math:`m=\\\\lambda\\\\nabla I`. 
:param lam: scalar momentum, batchxXxYxZ :param", "type: ' + str( type(v.data))) def lift_to_dimension(A, dim): \"\"\"Creates a", ":, :, :] = id[1] * spacing_ratio[1] # idnp[2, :,", "# now get it into range [0,(sz-1)*spacing]^d id = np.array(", "2.) / (2 * np.power(sig[0], 2.)) -np.power(X[1,:, :, :] -", "into range [0,(sz-1)*spacing]^d id = np.array( id.astype(dtype) ) if dim==1:", "of this size should not occur in practice anyway sz", "1, 2, and 3.') def set_affine_transform_to_identity(Ab): \"\"\"Sets the affine transformation", "identity map \"\"\" dim = len(sz)-2 nrOfI = int(sz[0]) if", "dim == 1: # idnp = np.zeros([1, sz[0]], dtype=dtype) #", "classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def init_weights(net,", "elif classname.find('Linear') != -1: init.xavier_normal(m.weight.data, gain=1) elif classname.find('BatchNorm2d') != -1:", "= torch.cat([moving, target], dim=1) elif sched == 'width_concat': input =", "size nrOfIxdimxXxYxZ \"\"\" nr_of_mg_weights = len(gaussian_std_weights) csz = np.array(sz) #", "g = np.exp(-np.power(X[0, :] - mu[0], 2.)/(2*np.power(sig[0], 2.))) g =", "csz = np.array([nrOfI,nr_of_mg_weights]+list(csz)) weights = torch.empty(*csz) # set the default", "\"\"\" # todo: check that this is the correct way", "s_m_params['smoother']['gaussian_std'] = gaussian_std s_m = sf.SmootherFactory(sz, spacing).create_smoother(s_m_params) return s_m def", "print(\"Grad Input\") print((torch.sum(gi[0].data), torch.sum(gi[1].data))) print(\"Grad Output\") print(torch.sum(go[0].data)) return gi[0], gi[1],", "Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) def centered_identity_map_multiN(sz, spacing, dtype='float32'): \"\"\" Create a centered identity", "Ab[0]=1. Ab[3]=1. elif dim==3: Ab.zero_() Ab[0]=1. Ab[4]=1. Ab[8]=1. 
else: raise", "= sz[0] nrOfC = sz[1] desiredSizeNC = np.array([nrOfI, nrOfC] +", ":return: \"\"\" if isinstance(tensors, Variable): space_normal(tensors.data, std=std) return tensors for", "sz = tensors[n][c].size() mus = np.zeros(dim) stds = std *", "active_unit == 'relu': self.active_unit = nn.ReLU(inplace=True) elif active_unit == 'elu':", "return (x != x).any() def create_symlink_with_correct_ext(sf, tf): abs_s = os.path.abspath(sf)", "A: numpy array :param dim: desired dimension of view :return:", "return 2 elif nr==12: return 3 else: raise ValueError('Only supports", "vector Ab to the identity transform. :param Ab: Affine parameter", "= np.zeros([1, sz[0]], dtype=dtype) idnp[0,:] = id[0] elif dim==2: idnp", "!= -1: init.kaiming_normal(m.weight.data, a=0, mode='fan_in') elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data,", ":param pd: parameter dictionary :return: tuple of (parameter_list, name_dictionary) \"\"\"", "self.net_sched == 'm_only': input = m elif self.net_sched == 'm_f_s':", "classname.find('Conv') != -1: space_normal(m.weight.data) elif classname.find('Linear') != -1: space_normal(m.weight.data) elif", "+ tensor[:, :, :, -2] - tensor[:, :, :, -3]", "from .data_wrapper import USE_CUDA import numpy as np from .", "id = np.zeros([nrOfI,2,sz[2],sz[3]],dtype=dtype) elif dim == 3: id = np.zeros([nrOfI,3,sz[2],sz[3],sz[4]],dtype=dtype)", "spacing/min_spacing # # # # now get it into range", "= np.zeros([2, sz[0], sz[1]], dtype=dtype) idnp[0,:, :] = id[0] idnp[1,:,", "else: low_res_sz = np.array(sz) low_res_sz[2::] = (np.ceil((np.array(sz[2::]) * factor))).astype('int16') return", "if dim == 1: # id = id.reshape(1, sz[0]) #", "Inverse of affine parameters \"\"\" dim =0 if Ab.shape[1] ==", "are currently supported for the identity map') for n in", ":return: numpy array \"\"\" return (v.detach()).cpu().numpy() def cxyz_to_xyzc( v ):", "downsampled image, the new spacing after downsampling \"\"\" desiredSize =", 
"init.orthogonal(m.weight.data, gain=1) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data,", "0.02) init.constant(m.bias.data, 0.0) def init_weights(net, init_type='normal'): print('initialization method [%s]' %", "lowResSize: size of low re parameterization :return: returns spacing of", "cxyz_to_xyzc( v ): \"\"\" Takes a torch array and returns", "\\ [ConvBnRel(self.dim +1, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20, self.dim,", "debugging: self.net = nn.Conv2d(self.dim + 2, self.dim, kernel_size, 1, padding=padding_size,", "spacing of image :return: returns the vector momentum \"\"\" nrOfI", "dimension dimxXxYxZ \"\"\" dim = len(sz) if dim==1: id =", "a dim+1 array if dim == 1: idnp = np.zeros([1,", "len(Ab) if nr==2: return 1 elif nr==6: return 2 elif", "to be even: fix properly in the Fourier transform later!\\n\\n')", "= int((kernel_size - 1) // 2) if same_padding else 0", "I: Input image (expected to be of BxCxXxYxZ format) :param", "out_channels, kernel_size, stride, padding=padding,bias=bias) #y = \\frac{x - mean[x]}{ \\sqrt{Var[x]", "dim == 1: idnp = np.zeros([1, sz[0]], dtype=dtype) idnp[0, :]", "the output of BatchNorm is equivalent to considering gamma=1 and", "array if dim == 1: idnp = np.zeros([1, sz[0]], dtype=dtype)", "'nn': warped_label_map = compute_warped_image_multiNC(label_map, phi, spacing,spline_order=0,zero_boundary=True) # check if here", "updated_param[n,:,dim] = torch.matmul(Cd[n,:,:dim], Ab[n,:,dim]) +Cd[n,:,dim] updated_param = updated_param.transpose(1,2).contiguous().view(Ab.shape[0],-1) return updated_param", "= np.array(sz) low_res_sz[2::] = (np.ceil((np.array(sz[2::]) * factor))).astype('int16') return low_res_sz def", "gaussian_std_weights[g] tmp = AdaptVal(weights) if get_preweight_from_network: tmp.requires_grad = True else:", "mask = sm.smooth(mask) if pow ==2: mask = mask**2 if", "affine transforms') if dim != len(sz)-2: raise 
ValueError('Incompatible number of", "d in range(dim): # id[d] *= spacing[d] # if sz[d]%2==0:", "compute_warped_image_multiNC(label_map, phi, spacing,spline_order=0,zero_boundary=True) # check if here should be add", "batchxXxYxZ :param I: image, batchXxYxZ :param sz: size of image", "vector field torch Parameter of given size. :param sz: just", "# :param dtype: numpy data-type ('float32', 'float64', ...) # :return:", "#y = \\frac{x - mean[x]}{ \\sqrt{Var[x] + \\epsilon}} * gamma", "if do_transform: # min_spacing = np.min(spacing) # spacing_ratio =min_spacing/spacing #", "phi) return I1_warped def _compute_warped_image_multiNC_2d(I0, phi, spacing, spline_order,zero_boundary=False,use_01_input=True): if spline_order", "dim == 3: id = np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]] else: raise", "add a dummy first index for d in range(dim): id[d]", "numpy data-type ('float32', 'float64', ...) # :return: returns the identity", "...] + Ab[6] * phi[2, ...] + Ab[9] phiR[1, ...]", "(batch size x param. vector) :return: Inverse of affine parameters", "1: return float(v) def checkNan(x): \"\"\"\" input should be list", "dtype=dtype) idnp[0, :] = id[0] elif dim == 2: idnp", "can do the symlink os.symlink(abs_s,abs_t_with_right_ext) def combine_dict(d1,d2): \"\"\"Creates a dictionary", "idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype) # idnp[0, :, :]", ":, :] = id[0] idnp[1, :, :] = id[1] elif", "id = id.reshape(1, sz[0]) # add a dummy first index", "first index for d in range(dim): id[d]*=spacing[d] #id[d]*=2./(sz[d]-1) #id[d]-=1. 
#", "zero_boundary, use_bilinear=False, use_01_input=use_01_input) elif spline_order == 1: stn = STN_ND_BCXYZ(spacing,", "omt_power) # omt_const = omt_const*omt_weight_penalty/(EV.reg_factor_in_mermaid*2) # sz = [1]+ [len(stds)]", "in 2D, [5,10,10] in 3D) :param nrOfI: number of images", "the identity transform) :return: \"\"\" dim = get_dim_of_affine_transform(Ab) if dim==1:", "= omt_const/(torch.abs(torch.log(max_std / min_std)) ** omt_power) # omt_const = omt_const*omt_weight_penalty/(EV.reg_factor_in_mermaid*2)", "1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_rd_normal(m): classname = m.__class__.__name__ #", "'normal': net.apply(weights_init_normal) elif init_type == 'uniform': net.apply(weights_init_uniform) elif init_type ==", "out_features, active_unit='relu'): super(FcRel, self).__init__() self.fc = nn.Linear(in_features, out_features) if active_unit", "True sz = np.array(list(I.size())) # check that the batch size", "map \"\"\" dim = len(sz) - 2 nrOfI = sz[0]", "torch array and returns it as a numpy array on", "elif dim == 2: id = np.zeros([nrOfI, 2, sz[2], sz[3]],", "Ab[2] * phi[1, ...] + Ab[4] # a_11x+a_21y+b1 phiR[1, ...]", "sz, lowResSize): \"\"\" Computes spacing for the low-res parameterization from", "omt_const = omt_const/(torch.abs(torch.log(max_std / min_std)) ** omt_power) # omt_const =", "[ConvBnRel(self.dim, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20,self.dim, 5, active_unit=active_unit, same_padding=True,", "forward(self, m,new_s=None): m = m * self.mask input = self.prepare_data(m,new_s)", "form y=Ax+b stored in a column vector. For A =[a1,a2,a3],", "def forward(self, x): x = self.fc(x) if self.active_unit is not", "Ab[2] * phi[0, ...] + Ab[5] * phi[1, ...] 
+", ":] = id[0] * spacing_ratio[0] # idnp[1, :, :] =", "output return __time_warped_function def interoplate_boundary_right(tensor): dim = len(tensor.shape)-2 if dim==1:", "64 - bit floating point: torch.DoubleTensor, torch.cuda.DoubleTensor # 16 -", "mu, sig) \"\"\" dim = len(mu) if dim == 1:", "of image :return: returns the vector momentum \"\"\" nrOfI =", "for the centered identity map') # # return idnp #", ":param d2: dictionary 2 :return: resulting dictionary \"\"\" d =", "imported (only supported in CUDA at the moment). ' 'Some", "sched == 'difference': input = moving-target return input def bh(m,gi,go):", "\\ [ConvBnRel(self.dim, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20,self.dim, 5, active_unit=active_unit,", "def tranfrom_var_list_into_min_normalized_space(var_list,spacing,do_transform=True): # if do_transform: # min_spacing = np.min(spacing) #", "torch.matmul(Cd[n,:,:dim], Ab[n,:,dim]) +Cd[n,:,dim] updated_param = updated_param.transpose(1,2).contiguous().view(Ab.shape[0],-1) return updated_param def apply_affine_transform_to_map(Ab,phi):", "1-3 are currently supported for the identity map') for n", "dim==1: id = id.reshape(1,sz[0]) # add a dummy first index", "== 'm_only': input = m elif self.net_sched == 'm_f_s': input", "make sure that subsequent sums work (hence will be smaller", "number of images :return: returns vector field of size nrOfIxdimxXxYxZ", "returns a dictionary which keeps track of the keys based", "work (hence will be smaller than it could be, #", "spy spz :param desiredSize: B C X Y Z :param", "__init__(self, inputs, dim, net_sched=None): # settings should include [using_bias, using", "* phi[0, ...] + Ab[3] * phi[1, ...] + Ab[5]", "number of elements. 
:param nr_of_elements: number of vector elements :return:", "g.sum() return g else: raise ValueError('Can only compute Gaussians in", "torch Variable of given size :param sz: just the spatial", "given by the column vector Ab to the identity transform.", "\\epsilon}} * gamma + beta #When affine=False the output of", "elif self.net_sched == 'm_d_s_f_t': input = organize_data(m, new_s, sched='depth_concat') input", "size BxCxXxYxZ \"\"\" dim = I0.dim()-2 if dim == 1:", "top of each other. :param Ab: parameter vector :return: dimensionality", "second dimension here is image dim, not nrOfC nrOfC =", "lowResSize[2::] = (np.ceil((np.array(sz[2:]) * np.array(factor)))).astype('int16') if lowResSize[-1] % 2 !=", "all the channels and add the results m = m", "* gamma + beta #When affine=False the output of BatchNorm", "spline_order,zero_boundary=False,use_01_input=True): if spline_order not in [0, 1, 2, 3, 4,", "model_pars def compute_vector_momentum_from_scalar_momentum_multiNC(lam, I, sz, spacing): \"\"\"Computes the vector momentum", "store it in a dim+1 array if dim == 1:", "now use this map for resampling ID = compute_warped_image_multiNC(I, idDes,", "-2, :] - tensor[:, :, -3, :] tensor[:, :,:, :,", "padding_size = (kernel_size-1)//2 if self.net_sched == 'm_only': if debugging: self.net", "[5] in 1D, [5,10] in 2D, [5,10,10] in 3D) :return:", "odd Otherwise shifts everything by 0.5*spacing :param sz: just the", "...] + Ab[4] # a_11x+a_21y+b1 phiR[1, ...] = Ab[1] *", "sz :param sz: size (high-res) :param factor: low-res factor (needs", "if dim == 1: return _compute_warped_image_multiNC_1d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif", "a dummy first index # # for d in range(dim):", "compute_warped_image_multiNC(I0.view(torch.Size([1, 1] + list(I0.size()))), phi.view(torch.Size([1] + list(phi.size()))), spacing, spline_order, zero_boundary,", "a vector parameters with a specified number of elements. 
:param", "# def compute_omt_const(stds,param,dim): # omt_power = param['forward_model']['smoother']['omt_power'] # omt_weight_penalty =", "torch Parameter of given size :param sz: just the spatial", "from both of them. :param d1: dictionary 1 :param d2:", "3.') Ab = Ab.view(Ab.shape[0], dim+1, dim).transpose(1, 2) Cd = Cd.view(Cd.shape[0],", "dim+1 array and rescale by the ratio # if dim", "np.zeros([1, sz[0]], dtype=dtype) # idnp[0, :] = id[0] * spacing_ratio[0]", "torch.clamp(v, min=(np.asscalar(np.finfo('float16').min))/reduction_factor, max=(np.asscalar(np.finfo('float16').max))/reduction_factor) else: raise ValueError('Unknown data type: ' +", "self.conv(x) if self.bn is not None: x = self.bn(x) if", "in a column vector. For A =[a1,a2,a3], the parameter vector", "ValueError('Images can only be warped in dimensions 1 to 3')", "to 3') def _get_low_res_spacing_from_spacing(spacing, sz, lowResSize): \"\"\"Computes spacing for the", "spline_order == 1: stn = STN_ND_BCXYZ(spacing,zero_boundary, use_bilinear=True, use_01_input=use_01_input) else: stn", "== 3: # id = np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]] # else:", ":param std: :return: \"\"\" if isinstance(tensors, Variable): space_normal(tensors.data, std=std) return", "mean :param sig: array indicating the standard deviations for the", "== type(dict()): # should already be in the right format", "size nrOfIxnrOfCxXxYxZ \"\"\" csz = np.array(sz) # just to make", "identity map code') raise ValueError('Double check the spacing here before", "them. :param d1: dictionary 1 :param d2: dictionary 2 :return:", "= CAx+Cb+d = x; C = inv(A), d = -Cb", "use_01_input=use_01_input) else: stn = SplineInterpolation_ND_BCXYZ(spacing, spline_order) I1_warped = stn(I0, phi)", "identity (in the case of arbitrary batch size). 
:param Ab:", "1, padding=padding_size, bias=False) else: net = \\ [ConvBnRel(self.dim +1, 20,", "it could be, # but values of this size should", "spacing: list with spacing information [sx,sy,sz] :param dtype: numpy data-type", "make sure it is a numpy array csz = np.array([nrOfI,nrOfC]+list(csz))", "the parameter vector is simply [a1;a2;a3;b], i.e., all columns stacked", "by 0.5*spacing :param sz: just the spatial dimensions, i.e., XxYxZ", "target), dim=3) elif sched == 'list_concat': input = torch.cat((moving.unsqueeze(0),target.unsqueeze(0)),dim=0) elif", "...] + Ab[3] * phi[1, ...] + Ab[6] * phi[2,", "numpy array :return: True if NaNs are present, False else", "# todo will remove, currently fix for symmetric training if", "os.path.samefile(abs_s,abs_t_with_right_ext): # nothing to do here, these are already the", "of parameters that can be used as an input to", "!= -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_xavier(m): classname", "[1]+[dim] +[1]*dim # spacing_ratio_t = spacing_ratio_t.view(*sp_sz) # new_var_list = [var*spacing_ratio_t", "code') spacing = np.ones(dim) centered_id = centered_identity_map(sz,spacing) g = compute_normalized_gaussian(centered_id,", "a numpy array csz = np.array([nrOfI, dim]+list(csz)) if get_field_from_external_network: tmp", "size of image :param lowResSize: size of low re parameterization", "id. :param pd: parameter dictionary :return: tuple of (parameter_list, name_dictionary)", "# print(classname) if classname.find('Conv') != -1: init.uniform(m.weight.data, 0.038, 0.042) elif", "the centered identity map') return idnp # # def centered_min_normalized_identity_map(sz,", "sure it is a numpy array csz = np.array([nrOfI,nrOfC]+list(csz)) return", "idnp[1,:, :, :] = id[1] idnp[2,:, :, :] = id[2]", "Ab[0] * phi[0, ...] + Ab[2] * phi[1, ...] +", "case of arbitrary batch size). :param Ab: Parameter vectors B", "...] + Ab[7] * phi[2, ...] 
+ Ab[10] phiR[2, ...]", "STN_ND_BCXYZ from .data_wrapper import AdaptVal from .data_wrapper import MyTensor from", "array csz = np.array([nr_of_images, dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0., 1e-7) def create_ND_vector_field_variable(sz):", "nrOfI=1,get_field_from_external_network=False): \"\"\"Create vector field torch Parameter of given size. :param", "sched == 'width_concat': input = torch.cat((moving, target), dim=3) elif sched", ":] - mu[0], 2.)/(2*np.power(sig[0], 2.))) g = g/g.sum() return g", "+ \\epsilon}} * gamma + beta #When affine=False the output", "net += [nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched =='m_f_s': if", "get_inverse_affine_param(Ab): \"\"\"Computes inverse of affine transformation. Formally: C(Ax+b)+d = CAx+Cb+d", "dim = len(sz) if dim==1: id = np.mgrid[0:sz[0]] elif dim==2:", "Variable): space_normal(tensors.data, std=std) return tensors for n in range(tensors.size()[0]): for", "\"\"\" :param I: B C X Y Z :param spacing:", "'m_f_s': input = organize_data(m,self.s,sched='depth_concat') elif self.net_sched == 'm_d_s': input =", "= id.reshape(1, sz[0]) # add a dummy first index for", "root_t,ext_t = os.path.splitext(abs_t) abs_t_with_right_ext = root_t + ext_s if os.path.isfile(abs_t_with_right_ext):", "0: # return get_warped_label_map(I0,phi,spacing) stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=False, use_01_input=use_01_input)", ":param Ab: B x pars (batch size x param. 
vector)", "5, 6, 7, 8, 9]: raise ValueError('Currently only orders 0", "self.active_unit = None def forward(self, x): x = self.conv(x) if", "stn = SplineInterpolation_ND_BCXYZ(spacing, spline_order) I1_warped = stn(I0, phi) return I1_warped", "here, these are already the same file return else: os.remove(abs_t_with_right_ext)", "3D) :return: returns a tuple: the downsampled image, the new", "= d1.copy() d.update(d2) return d def get_parameter_list_from_parameter_dict(pd): \"\"\"Takes a dictionary", "d = -Cb :param Ab: B x pars (batch size", "('float32', 'float64', ...) :return: returns the identity map \"\"\" dim", "the desired size (excluding B and C, i.e, 1 entry", "self).__init__() self.fc = nn.Linear(in_features, out_features) if active_unit == 'relu': self.active_unit", "should include [using_bias, using bn, using elu] # inputs should", "self.net = nn.Conv2d(self.dim+2, self.dim, kernel_size, 1, padding=padding_size, bias=False) else: net", "# max_std = torch.max(stds) # omt_const = torch.abs(torch.log(max_std/stds))**omt_power # omt_const", "* phi[2, ...] + Ab[10] phiR[2, ...] 
= Ab[2] *", "start = torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True) start.record() output = f(input)", ":, :] = fdt.dYc(I)*lam elif dim == 3: m[:, 0,", "torch.Tensor(I) is_numpy = True sz = np.array(list(I.size())) # check that", "not in [1,2,3]: raise ValueError('Only supports dimensions 1, 2, and", "res parameterization \"\"\" #todo: check that this is the correct", "# idnp[2, :, :, :] = id[2] * spacing_ratio[2] #", "net.apply(weights_init_orthogonal) else: raise NotImplementedError('initialization method [%s] is not implemented' %", "+ ext_s if os.path.isfile(abs_t_with_right_ext): if os.path.samefile(abs_s,abs_t_with_right_ext): # nothing to do", "net += [nn.Sigmoid()] self.net = nn.Sequential(*net) def prepare_data(self, m, new_s):", ":dim]) Ab_inv[n, :, :dim] = tm_inv Ab_inv[n, :, dim] =", "organize_data(m, new_s, sched='depth_concat') input = organize_data(input, self.t, sched='depth_concat') return input", "else: raise ValueError('Images can only be warped in dimensions 1", "newspacing = spacing * ((sz[2::].astype('float') - 1.) / ( desiredSizeNC[2::].astype('float')", "elif Ab.shape[1]==6: dim = 2 elif Ab.shape[1]==12: dim = 3", "m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: init.uniform(m.weight.data, 0.038, 0.042)", "idnp[0,:] = id[0] elif dim==2: idnp = np.zeros([2, sz[0], sz[1]],", "net = \\ [ConvBnRel(self.dim + 1, 20, 5, active_unit=active_unit, same_padding=True,", "2, and 3.') def set_affine_transform_to_identity_multiN(Ab): \"\"\"Set the affine transforms to", "for the desired size (excluding B and C, i.e, 1", "id = np.mgrid[0:sz[0]] elif dim == 2: id = np.mgrid[0:sz[0],", "id[d]*=spacing[d] #id[d]*=2./(sz[d]-1) #id[d]-=1. # and now store it in a", "\"\"\" nr = len(Ab) if nr==2: return 1 elif nr==6:", "range(dim): id[d]*=spacing[d] #id[d]*=2./(sz[d]-1) #id[d]-=1. 
# and now store it in", "resampled def resample_image(I, spacing, desiredSize, spline_order=1, zero_boundary=False, identity_map=None): \"\"\" Resample", ":math:`m=\\\\lambda\\\\nabla I`. :param lam: scalar momentum, batchxXxYxZ :param I: image,", "classname.find('Linear') != -1: init.orthogonal(m.weight.data, gain=1) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data,", "vector momentum from the scalar momentum: :math:`m=\\\\lambda\\\\nabla I`. :param lam:", "a normalized Gaussian. :param X: map with coordinates at which", "in pd: pl.append(pd[key]) par_to_name_dict[pd[key]] = key return pl, par_to_name_dict def", "from . import finite_differences as fd import torch.nn as nn", "init_type == 'xavier': net.apply(weights_init_xavier) elif init_type == 'kaiming': net.apply(weights_init_kaiming) elif", "if spline_order == 0: stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=False, use_01_input=use_01_input)", "init.kaiming_normal(m.weight.data, a=0, mode='fan_in') elif classname.find('Linear') != -1: init.kaiming_normal(m.weight.data, a=0, mode='fan_in')", "for c in range(tensors.size()[1]): dim = tensors[n][c].dim() sz = tensors[n][c].size()", "lowResSize = np.array(sz) if not isinstance(factor, list): lowResSize[2::] = (np.ceil((np.array(sz[2:])", "dim == 1: phiR = phi * Ab[0] + Ab[1]", "dimensions :return: Normalized Gaussian evaluated at coordinates in X Example::", "lowResSize): \"\"\"Computes spacing for the low-res parametrization from image spacing.", "- 1) if identity_map is not None: # todo will", "= mask**2 if pow ==3: mask = mask*mask*mask return mask", "identity map') # # return idnp # # def tranfrom_var_list_into_min_normalized_space(var_list,spacing,do_transform=True):", "return float(v) def checkNan(x): \"\"\"\" input should be list of", "and 3.') def set_affine_transform_to_identity(Ab): \"\"\"Sets the affine transformation as given", "len(v.shape)-2 if dim ==2: v = v.permute(0,2,3,1) if dim ==3:", "C X Y Z :param spacing: spx 
spy spz :param", "AdaptVal(torch.from_numpy(identity_map_multiN(desiredSizeNC, newspacing))) # now use this map for resampling ID", "== 'm_f_s': input = organize_data(m,self.s,sched='depth_concat') elif self.net_sched == 'm_d_s': input", "A.reshape([1]*(dim-current_dim)+list(A.shape)) def get_dim_of_affine_transform(Ab): \"\"\"Returns the number of dimensions corresponding to", "raise ValueError('Only supports dimensions 1, 2, and 3.') return phiR", "= torch.cuda.Event(enable_timing=True) start.record() output = f(input) end.record() # Waits for", "idnp[0, :, :] = id[0] idnp[1, :, :] = id[1]", "* spacing_ratio[0] # idnp[1, :, :] = id[1] * spacing_ratio[1]", "compute_omt_const(stds,param,dim): # omt_power = param['forward_model']['smoother']['omt_power'] # omt_weight_penalty = param['forward_model']['smoother']['omt_weight_penalty'] #", "returns the identity map \"\"\" dim = len(sz) - 2", "...] = centered_identity_map(sz[2::], spacing,dtype=dtype) return id def identity_map_multiN(sz,spacing,dtype='float32'): \"\"\" Create", "m,new_s=None): m = m * self.mask input = self.prepare_data(m,new_s) x=", "range [0,(sz-1)*spacing]^d # id = np.array(id.astype(dtype)) # if dim ==", "vector elements :return: returns the parameter vector \"\"\" return Parameter(MyTensor(nr_of_elements).normal_(0.,", "print('WARNING: Could not compute low_res_size as factor was ' +", "x = self.conv(x) if self.bn is not None: x =", "sz = v.size() reduction_factor = np.prod(np.array(sz)) condition = True if", ":param nr_of_elements: number of vector elements :return: returns the parameter", "np.array(sz) else: low_res_sz = np.array(sz) low_res_sz[2::] = (np.ceil((np.array(sz[2::]) * factor))).astype('int16')", "into a list of parameters that can be used as", "assume that they come from the optimizer # (i.e., list", "=='m_f_s': if debugging: self.net = nn.Conv2d(self.dim+1, self.dim, kernel_size, 1, padding=padding_size,", "# :param sz: just the spatial dimensions, i.e., XxYxZ #", "import torch 
from torch.nn.parameter import Parameter from torch.autograd import Variable", "\"\"\"\" input should be list of Variable \"\"\" return [len(np.argwhere(np.isnan(elem.detach().cpu().numpy())))", "but also returns a dictionary which keeps track of the", "image of size BxCxXxYxZ \"\"\" dim = I0.dim()-2 if dim", "np.zeros([nrOfI, 3, sz[2], sz[3], sz[4]], dtype=dtype) else: raise ValueError('Only dimensions", "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]:", "dim).transpose(1,2) Ab_inv = torch.zeros_like(Ab) for n in range(Ab.shape[0]): tm_inv =", "different view (effectively adding dimensions) Iw = compute_warped_image_multiNC(I0.view(torch.Size([1, 1] +", "*= spacing[d] if sz[d]%2==0: #even id[d] -= spacing[d]*(sz[d]//2) else: #odd", "array describing the spatial spacing :param desiredSize: array for the", "= inv(A), d = -Cb :param Ab: B x pars", "net += [nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched == 'm_d_s':", "np.zeros([nrOfI,3,sz[2],sz[3],sz[4]],dtype=dtype) else: raise ValueError('Only dimensions 1-3 are currently supported for", "id = np.zeros([nrOfI,1,sz[2]],dtype=dtype) elif dim == 2: id = np.zeros([nrOfI,2,sz[2],sz[3]],dtype=dtype)", "that this is the correct way of doing it return", "1, :, :] = fdt.dYc(I)*lam elif dim == 3: m[:,", "# new_var_list = [var*spacing_ratio_t if var is not None else", "the correct way of doing it return spacing * (np.array(sz[2::])", "it return spacing * (np.array(sz[2::]) - 1) / (np.array(lowResSize[2::]) -", "print(classname) if classname.find('Conv') != -1: init.xavier_normal(m.weight.data, gain=1) elif classname.find('Linear') !=", "are already the same file return else: os.remove(abs_t_with_right_ext) # now", "last dimension to be even: fix properly in the Fourier", "for 2D, and 3 for 3D) :return: returns a tuple:", "print(classname) if classname.find('Conv') != -1: space_normal(m.weight.data) elif classname.find('Linear') != -1:", "organize_data(m,self.s,sched='depth_concat') elif self.net_sched == 'm_d_s': input = 
organize_data(m, new_s, sched='depth_concat')", ":] = id[0] idnp[1, :, :, :] = id[1] idnp[2,", "def _get_low_res_spacing_from_spacing(spacing, sz, lowResSize): \"\"\"Computes spacing for the low-res parametrization", "x param. vector) :return: Inverse of affine parameters \"\"\" dim", "parameterization from image spacing :param spacing: image spacing :param sz:", "and 3.') phiR = MyTensor(sz).zero_().type_as(phi) if dim == 1: phiR", "# if do_transform: # min_spacing = np.min(spacing) # spacing_ratio =spacing/min_spacing", "mask for the omt \"\"\" dim = len(img_sz) mask_sz =", "2D, and 3 for 3D) :return: returns a tuple: the", "dim == 2: g = np.exp(-np.power(X[0,:,:]-mu[0],2.)/(2*np.power(sig[0],2.)) - np.power(X[1,:, :] -", "list(phi.size()))), spacing, spline_order, zero_boundary, use_01_input) return Iw.view(I0.size()) def compute_warped_image_multiNC(I0, phi,", "nn.Sequential(*net) elif self.net_sched =='m_f_s': if debugging: self.net = nn.Conv2d(self.dim+1, self.dim,", "stn(I0, phi) return I1_warped def _compute_warped_image_multiNC_3d(I0, phi, spacing, spline_order,zero_boundary=False,use_01_input=True): if", "== 1: stn = STN_ND_BCXYZ(spacing,zero_boundary, use_bilinear=True, use_01_input=use_01_input) else: stn =", "omt_const/(torch.abs(torch.log(max_std / min_std)) ** omt_power) # omt_const = omt_const*omt_weight_penalty/(EV.reg_factor_in_mermaid*2) #", ":return: Updated affine parameters \"\"\" dim = 0 if Ab.shape[1]==2:", "0.0) def weights_init_xavier(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv')", "= len(img_sz) mask_sz = [1,1]+ list(img_sz) mask = AdaptVal(torch.ones(*mask_sz))*mask_value if", "m = create_ND_vector_field_variable_multiN(sz, nrOfI) if dim == 1: m[:, 0,", "get_preweight_from_network: tmp.requires_grad = True else: tmp = Parameter(tmp) return tmp", "Returns an identity map. 
:param sz: just the spatial dimensions,", "!= x).any() def create_symlink_with_correct_ext(sf, tf): abs_s = os.path.abspath(sf) ext_s =", "with spacing information [sx,sy,sz] # :param dtype: numpy data-type ('float32',", "min_std = torch.min(stds) # max_std = torch.max(stds) # omt_const =", "(in the case of arbitrary batch size). :param Ab: Parameter", "spacing :param sz: size of image :param lowResSize: size of", ":param dtype: numpy data-type ('float32', 'float64', ...) :return: returns the", "of A of dimension dim (by adding dummy dimensions if", "return s_m def get_warped_label_map(label_map, phi, spacing, sched='nn'): if sched ==", "keys based on memory id. :param pd: parameter dictionary :return:", "this package in a more meaningful way. \"\"\" from __future__", "not in [0, 1, 2, 3, 4, 5, 6, 7,", "ind_pars else: # if ind_pars is not a dictionary assume", "dim == 2: id = np.mgrid[0:sz[0], 0:sz[1]] elif dim ==", ":] = id[1] * spacing_ratio[1] # elif dim == 3:", "elif dim == 3: id = np.zeros([nrOfI,3,sz[2],sz[3],sz[4]],dtype=dtype) else: raise ValueError('Only", "input def bh(m,gi,go): print(\"Grad Input\") print((torch.sum(gi[0].data), torch.sum(gi[1].data))) print(\"Grad Output\") print(torch.sum(go[0].data))", ">>> X = [0,0] >>> print(compute_normalized_gaussian(X, mu, sig) \"\"\" dim", "abs(torch.sum(warped_label_map.data -warped_label_map.data.round()))< 0.1, \"nn interpolation is not precise\" else: raise", "before running this code') spacing = np.ones(dim) centered_id = centered_identity_map(sz,spacing)", "debugging: self.net = nn.Conv2d(self.dim+1, self.dim, kernel_size, 1, padding=padding_size, bias=False) else:", "mu, sig = [1,1], [1,1] >>> X = [0,0] >>>", "+ Ab[5] * phi[1, ...] + Ab[8] * phi[2, ...]", "create_ND_vector_field_variable_multiN(sz[2::], nrOfI) # attention that the second dimension here is", "XxYxZ :param spacing: list with spacing information [sx,sy,sz] :param dtype:", "identity map. 
:param sz: just the spatial dimensions, i.e., XxYxZ", "dictionary :return: tuple of (parameter_list, name_dictionary) \"\"\" par_to_name_dict = dict()", "sz: size (high-res) :param factor: low-res factor (needs to be", "n in range(nrOfI): id[n,...] = identity_map(sz[2::],spacing,dtype=dtype) return id def centered_identity_map(sz,", "(needs to be <1) :return: low res size \"\"\" if", "dim = len(sz) csz = np.array(sz) # just to make", "kernel :param tensor: :param mean: :param std: :return: \"\"\" if", "= I.shape[0] desiredSize = desiredSize.copy() desiredSize[0] = n_batch identity_map =", "= \\ [ConvBnRel(self.dim + 1, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn),", "Ab[n,:,dim]) inv_affine_param = Ab_inv.transpose(1, 2).contiguous().view(Ab.shape[0], -1) return inv_affine_param def update_affine_param(Ab,", "image spacing :param sz: size of image :param lowResSize: size", "elif dim == 2: idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype)", "it return spacing * (np.array(sz[2::])-1) / (np.array(lowResSize[2::])-1) def _get_low_res_size_from_size(sz, factor):", "these are already the same file return else: os.remove(abs_t_with_right_ext) #", "of low res parameterization \"\"\" # todo: check that this", "the different dimensions :return: Normalized Gaussian evaluated at coordinates in", "map with coordinates at which to evaluate :param mu: array", "1-3') def _compute_warped_image_multiNC_1d(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True): if spline_order", "if ind_pars is not a dictionary assume that they come", "\"\"\"Computes spacing for the low-res parametrization from image spacing. :param", "supported for the identity map') # # min_spacing = np.min(spacing)", "AdaptVal from .data_wrapper import MyTensor from . 
import smoother_factory as", "bn=False, reverse=False, bias=False): super(ConvBnRel, self).__init__() padding = int((kernel_size - 1)", "identity_map(sz[2::],spacing,dtype=dtype) return id def centered_identity_map(sz, spacing, dtype='float32'): \"\"\" Returns a", "# idnp = np.zeros([1, sz[0]], dtype=dtype) # idnp[0, :] =", "t2np(v): \"\"\" Takes a torch array and returns it as", ":param I: Input image (expected to be of BxCxXxYxZ format)", "\"\"\" return (x != x).any() def create_symlink_with_correct_ext(sf, tf): abs_s =", "__future__ import absolute_import # from builtins import str # from", "key value pairs for model parameters and converts it into", "None else None for var in var_list] # else: #", "the self.net and self.net_input padding_size = (kernel_size-1)//2 if self.net_sched ==", "else: self.active_unit = None def forward(self, x): x = self.conv(x)", "I`. :param lam: scalar momentum, batchxXxYxZ :param I: image, batchXxYxZ", "(expected to be of BxCxXxYxZ format) :param spacing: array describing", "elif dim==2: id = np.mgrid[0:sz[0],0:sz[1]] elif dim==3: id = np.mgrid[0:sz[0],0:sz[1],0:sz[2]]", "len(sz) # if dim == 1: # id = np.mgrid[0:sz[0]]", "spline_order == 0: # return get_warped_label_map(I0,phi,spacing) stn = STN_ND_BCXYZ(spacing, zero_boundary,", "sz, spacing): \"\"\"Computes the vector momentum from the scalar momentum:", "import get_nn_interpolation except ImportError: print('WARNING: nn_interpolation could not be imported", "...]) return phiR def compute_normalized_gaussian(X, mu, sig): \"\"\"Computes a normalized", "(hence will be smaller than it could be, # but", "= id[0] * spacing_ratio[0] # elif dim == 2: #", "phiR = phi * Ab[0] + Ab[1] elif dim ==", "var in var_list] # else: # new_var_list = var_list #", "with coordinates at which to evaluate :param mu: array indicating", "1-3') return m def create_ND_vector_field_variable_multiN(sz, nr_of_images=1): \"\"\" Create vector field", "dim == 1: id = np.zeros([nrOfI, 1, sz[2]], dtype=dtype) elif", 
"a numpy array csz = np.array([nrOfI,nr_of_mg_weights]+list(csz)) weights = torch.empty(*csz) #", "= np.zeros([nrOfI, 1, sz[2]], dtype=dtype) elif dim == 2: id", "1, sz[2]], dtype=dtype) elif dim == 2: id = np.zeros([nrOfI,", "Otherwise shifts everything by 0.5*spacing :param sz: just the spatial", "sz[0]], dtype=dtype) # idnp[0, :] = id[0] * spacing_ratio[0] #", ":, :,-3] if dim==3: tensor[:, :,:, -1,:, :] = tensor[:,", "abs_s = os.path.abspath(sf) ext_s = os.path.splitext(abs_s)[1] abs_t = os.path.abspath(tf) root_t,ext_t", "return g elif dim == 3: g = np.exp(-np.power(X[0,:, :,", "size from a (high-res) sz :param sz: size (high-res) :param", "-1: init.uniform(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0,", "if sched == 'depth_concat': input = torch.cat([moving, target], dim=1) elif", "'m_d_s_f_t': if debugging: self.net = nn.Conv2d(self.dim + 2, self.dim, kernel_size,", "the ratio # if dim == 1: # idnp =", "2, self.dim, kernel_size, 1, padding=padding_size, bias=False) else: net = \\", "the spacing here before running this code') spacing = np.ones(dim)", "3: id = np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]] else: raise ValueError('Only dimensions", "nrOfI) if dim == 1: m[:, 0, :] = fdt.dXc(I)*lam", "array indicating the mean :param sig: array indicating the standard", "identity trans. :return: \"\"\" sz = Ab.size() nr_of_images = sz[0]", "I, sz, spacing): \"\"\"Computes the vector momentum from the scalar", "corresponds to dimension) :return: returns transformed maps \"\"\" sz =", "- mu[2], 2.) 
/ (2 * np.power(sig[2], 2.))) g =", "if classname.find('Conv') != -1: init.orthogonal(m.weight.data, gain=1) elif classname.find('Linear') != -1:", "ValueError('Double check the spacing here before running this code') spacing", "BxCxXxYxZ :param I: image, BxCxXxYxZ :param sz: size of image", "it is a numpy array csz = np.array([nr_of_images, dim]+list(csz)) return", "x = self.fc(x) if self.active_unit is not None: x =", ":param A: numpy array :param dim: desired dimension of view", "if the sz is odd Otherwise shifts everything by 0.5*spacing", "of parameters \"\"\" pl = [] for key in pd:", "else: raise NotImplementedError('initialization method [%s] is not implemented' % init_type)", "ValueError('Only supports dimensions 1, 2, and 3.') phiR = MyTensor(sz).zero_().type_as(phi)", "Ab[1] elif dim == 2: phiR[0, ...] = Ab[0] *", "low_res_size[2::],spline_order) return low_res_image def individual_parameters_to_model_parameters(ind_pars): model_pars = dict() if type(ind_pars)", "if dim == 1: g = np.exp(-np.power(X[0, :] - mu[0],", "as pars from .spline_interpolation import SplineInterpolation_ND_BCXYZ import os try: from", "init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_kaiming(m): classname = m.__class__.__name__", "torch.nn as nn import torch.nn.init as init from . 
import", "ValueError('Currently only orders 0 to 9 are supported') if spline_order", "is a numpy array csz = np.array([nrOfI,nrOfC]+list(csz)) return Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) def", "sz[2]], dtype=dtype) elif dim == 2: id = np.zeros([nrOfI, 2,", "= len(A.shape) if current_dim > dim: raise ValueError('Can only add", "range(Ab.shape[0]): tm_param = torch.matmul(Cd[n,:,:dim],Ab[n,:,:dim]) updated_param[n,:,:dim] = tm_param updated_param[n,:,dim] = torch.matmul(Cd[n,:,:dim],", "\"\"\" return [len(np.argwhere(np.isnan(elem.detach().cpu().numpy()))) for elem in x] def noramlized_spacing_to_smallest(spacing): min_sp", "spacing information [sx,sy,sz] :param dtype: numpy data-type ('float32', 'float64', ...)", "BatchNorm is equivalent to considering gamma=1 and beta=0 as constants.", "= Ab.size()[0] if nr_of_images != sz[0]: raise ValueError('Incompatible number of", "classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_normal(m):", "init_type == 'orthogonal': net.apply(weights_init_orthogonal) else: raise NotImplementedError('initialization method [%s] is", "= np.mgrid[0:sz[0], 0:sz[1]] # elif dim == 3: # id", "sz is odd # Otherwise shifts everything by 0.5*spacing #", "if any input elements are NaNs. :param x: numpy array", "nrOfC: number of channels :return: returns vector field of size", "else: net = \\ [ConvBnRel(self.dim +1, 20, 5, active_unit=active_unit, same_padding=True,", "def compute_normalized_gaussian(X, mu, sig): \"\"\"Computes a normalized Gaussian. 
:param X:", "sz: size of image :param spacing: spacing of image :return:", "set_affine_transform_to_identity(Ab): \"\"\"Sets the affine transformation as given by the column", "else: lowResSize = np.array(sz) if not isinstance(factor, list): lowResSize[2::] =", "2: # id = np.mgrid[0:sz[0], 0:sz[1]] # elif dim ==", ":param spacing: image spacing [dx,dy,dz] :return: returns the warped image", "2 elif Ab.shape[1] == 12: dim = 3 if dim", "n in range(Ab.shape[0]): tm_param = torch.matmul(Cd[n,:,:dim],Ab[n,:,:dim]) updated_param[n,:,:dim] = tm_param updated_param[n,:,dim]", "Ab: affine transform parameter column vectors (batch size x param.", "phi.size() dim = len(sz) - 1 if dim not in", "XxYxZ :param phi: map for the warping, size dimxXxYxZ :param", "dim = len(sz)-2 nrOfI = int(sz[0]) if dim == 1:", "in range(nrOfI): id[n, ...] = centered_identity_map(sz[2::], spacing,dtype=dtype) return id def", "# now use this map for resampling ID = compute_warped_image_multiNC(I,", "array :return: numpy array \"\"\" dim = len(v.shape)-2 if dim", "= nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding=padding, bias=bias) else: self.conv =", "= np.mgrid[0:sz[0]] elif dim == 2: id = np.mgrid[0:sz[0], 0:sz[1]]", "...] + Ab[11] else: raise ValueError('Only supports dimensions 1, 2,", ":] + tensor[:, :, -2, :] - tensor[:, :, -3,", "= self.active_unit(x) return x class AdpSmoother(nn.Module): \"\"\" a simple conv.", "create_ND_vector_field_variable_multiN(sz, nr_of_images=1): \"\"\" Create vector field torch Variable of given", "not occur in practice anyway sz = v.size() reduction_factor =", "- mu[0], 2.) / (2 * np.power(sig[0], 2.)) -np.power(X[1,:, :,", "same nrOfI = sz[0] nrOfC = sz[1] desiredSizeNC = np.array([nrOfI,", "sz[0], sz[1], sz[2]], dtype=dtype) # idnp[0, :, :, :] =", "affine transform to a map. 
:param Ab: affine transform parameter", "phi: map for the warping, size dimxXxYxZ :param spacing: image", ":] = id[1] idnp[2, :, :, :] = id[2] else:", "m[:, 1, :, :, :] = fdt.dYc(I)*lam m[:, 2, :,", "spacing: image spacing [dx,dy,dz] :return: returns the warped image of", ":] tensor[:, :,:, :, -1, :] = tensor[:, :, :,", "from a (high-res) sz :param sz: size (high-res) :param factor:", "to a map. :param Ab: affine transform parameter column vector", "size (excluding B and C, i.e, 1 entry for 1D,", "[5] in 1D, [5,10] in 2D, [5,10,10] in 3D) :param", "spacing, dtype='float32'): \"\"\" Returns a centered identity map (with 0", "vector field of size nrOfIxdimxXxYxZ \"\"\" nr_of_mg_weights = len(gaussian_std_weights) csz", "id = np.mgrid[0:sz[0], 0:sz[1]] elif dim == 3: id =", "compute_vector_momentum_from_scalar_momentum_multiN(lam, I, nrOfI, sz, spacing): \"\"\"Computes the vector momentum from", "- 1) // 2) if same_padding else 0 if not", "supported for the identity map') for n in range(nrOfI): id[n,...]", "make sure it is a numpy array csz = np.array([nrOfI,", "tensor[:,:,-1]= tensor[:,:-2]+ tensor[:,:-2]-tensor[:,:-3] if dim==2: tensor[:, :, -1,:] = tensor[:,", "Variable of given size. :param sz: just the spatial sizes", "weights_init_orthogonal(m): classname = m.__class__.__name__ print(classname) if classname.find('Conv') != -1: init.orthogonal(m.weight.data,", "print('WARNING: nn_interpolation could not be imported (only supported in CUDA", "self.net_sched == 'm_f_s_t': if debugging: self.net = nn.Conv2d(self.dim+2, self.dim, kernel_size,", "for symmetric training if I.shape[0] != identity_map.shape[0]: n_batch = I.shape[0]", "= tensor[:, :,-2,:] + tensor[:, :,-2,:] - tensor[:, :,-3,:] tensor[:,", "sched =='w_K_w': gaussian_std_weights = [torch.sqrt(std_w) for std_w in gaussian_std_weights] for", "the identity (in the case of arbitrary batch size). 
:param", "Fourier transform later!\\n\\n') return lowResSize def get_res_spacing_from_spacing(spacing, sz, lowResSize): \"\"\"", "the identity map') for n in range(nrOfI): id[n,...] = identity_map(sz[2::],spacing,dtype=dtype)", "stored in a column vector. For A =[a1,a2,a3], the parameter", "if spline_order == 0: # return get_warped_label_map(I0,phi,spacing) stn = STN_ND_BCXYZ(spacing,", "= tensor[:, :, -2, :] + tensor[:, :, -2, :]", "dimensions 1-3') return m def create_ND_vector_field_variable_multiN(sz, nr_of_images=1): \"\"\" Create vector", "an affine transformation of the form y=Ax+b stored in a", "into range [0,(sz-1)*spacing]^d id = np.array(id.astype(dtype)) if dim == 1:", "point: torch.HalfTensor, torch.cuda.HalfTensor # todo: maybe find a cleaner way", "a cleaner way of handling this # this is to", "in range(Ab.shape[0]): tm_inv = torch.inverse(Ab[n, :, :dim]) Ab_inv[n, :, :dim]", "# elif dim == 3: # idnp = np.zeros([3, sz[0],", "stride, padding=padding,bias=bias) #y = \\frac{x - mean[x]}{ \\sqrt{Var[x] + \\epsilon}}", "and beta=0 as constants. 
self.bn = nn.BatchNorm2d(out_channels, eps=0.0001, momentum=0, affine=True)", "= m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: space_normal(m.weight.data) elif", "the omt \"\"\" dim = len(img_sz) mask_sz = [1,1]+ list(img_sz)", "in the Fourier transform later!\\n\\n') return lowResSize def get_res_spacing_from_spacing(spacing, sz,", "get_net_sched(self, debugging=True, using_bn=True, active_unit='relu', using_sigmoid=False , kernel_size=5): # return the", "interpolation is not precise\" else: raise ValueError(\" the label warping", "20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20, self.dim, 5, active_unit=active_unit, same_padding=True,", "of view :return: returns view of A of appropriate dimension", "= tm_inv Ab_inv[n, :, dim] = - torch.matmul(tm_inv, Ab[n,:,dim]) inv_affine_param", "from the optimizer # (i.e., list and each list element", "m[:, 0, :] = fdt.dXc(I)*lam elif dim == 2: m[:,", "-Cb :param Ab: B x pars (batch size x param.", "[sx,sy,sz] :param dtype: numpy data-type ('float32', 'float64', ...) 
:return: returns", ":] = id[1] elif dim == 3: idnp = np.zeros([3,", "= centered_identity_map(sz,spacing) g = compute_normalized_gaussian(centered_id, mus, stds) tensors[n,c] = torch.from_numpy(g)", ":param dim: desired dimension of view :return: returns view of", "size :param sz: just the spatial sizes (e.g., [5] in", "== 'kaiming': net.apply(weights_init_kaiming) elif init_type == 'orthogonal': net.apply(weights_init_orthogonal) else: raise", "np.array([nrOfI,nrOfC]+list(csz)) return Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) def centered_identity_map_multiN(sz, spacing, dtype='float32'): \"\"\" Create a", "sched='depth_concat') return input def forward(self, m,new_s=None): m = m *", "print(torch.sum(go[0].data)) return gi[0], gi[1], gi[2] class ConvBnRel(nn.Module): # conv +", "same_padding=False, bn=False, reverse=False, bias=False): super(ConvBnRel, self).__init__() padding = int((kernel_size -", "evaluate :param mu: array indicating the mean :param sig: array", "2: dim = 1 elif Ab.shape[1] == 6: dim =", "= AdaptVal(weights) if get_preweight_from_network: tmp.requires_grad = True else: tmp =", "= np.array([nrOfI, dim]+list(csz)) if get_field_from_external_network: tmp = MyTensor(*(csz.tolist())).normal_(0.,1e-7) tmp.requires_grad =", "to do here, these are already the same file return", "nrOfIxnrOfCxXxYxZ \"\"\" csz = np.array(sz) # just to make sure", "= get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask = sm.smooth(mask) return mask.detach() def momentum_boundary_weight_mask(img_sz,spacing,mask_range=5,smoother_std =0.05,pow=2):", "mask**2 if pow ==3: mask = mask*mask*mask return mask #", "= dict() pl = [] for key in pd: pl.append(pd[key])", "dictionary which keeps track of the keys based on memory", "# from builtins import str # from builtins import range", "get_dim_of_affine_transform(Ab[0,:]) nr_of_images = Ab.size()[0] if nr_of_images != sz[0]: raise ValueError('Incompatible", "= 
np.exp(-np.power(X[0,:,:]-mu[0],2.)/(2*np.power(sig[0],2.)) - np.power(X[1,:, :] - mu[1], 2.) / (2", "use_01_input=True): if spline_order not in [0, 1, 2, 3, 4,", "+[1]*dim # spacing_ratio_t = spacing_ratio_t.view(*sp_sz) # new_var_list = [var*spacing_ratio_t if", "inv(A), d = -Cb :param Ab: B x pars (batch", ":param v: torch array :return: numpy array \"\"\" dim =", "array csz = np.array([nrOfI,nr_of_mg_weights]+list(csz)) weights = torch.empty(*csz) # set the", "return idnp def omt_boundary_weight_mask(img_sz,spacing,mask_range=5,mask_value=5,smoother_std =0.05): \"\"\"generate a smooth weight mask", ":, :] = id[1] elif dim == 3: idnp =", "desiredSize, spline_order=1, zero_boundary=False, identity_map=None): \"\"\" :param I: B C X", "check if here should be add assert assert abs(torch.sum(warped_label_map.data -warped_label_map.data.round()))<", "0.038, 0.042) elif classname.find('Linear') != -1: init.uniform(m.weight.data, 0.0, 0.02) elif", "raise ValueError('Only dimensions 1-3 are currently supported for the identity", "= np.zeros([1, sz[0]], dtype=dtype) # idnp[0, :] = id[0] *", "16 - bit floating point: torch.HalfTensor, torch.cuda.HalfTensor # todo: maybe", "new_s): input=None if self.net_sched == 'm_only': input = m elif", "2, 3]: raise ValueError('Only supports dimensions 1, 2, and 3.')", "requires_grad = True) self.get_net_sched() #self.net.register_backward_hook(bh) def get_net_sched(self, debugging=True, using_bn=True, active_unit='relu',", "(only supported in CUDA at the moment). 
' 'Some functionality", "affine transforms') phiR = MyTensor(sz).zero_().type_as(phi) for nrI in range(nr_of_images): phiR[nrI,", "True else: tmp = Parameter(tmp) return tmp def create_ND_scalar_field_parameter_multiNC(sz, nrOfI=1,", "running this code') spacing = np.ones(dim) centered_id = centered_identity_map(sz,spacing) g", "'kaiming': net.apply(weights_init_kaiming) elif init_type == 'orthogonal': net.apply(weights_init_orthogonal) else: raise NotImplementedError('initialization", "= sampler.downsample_image_to_size(I, spacing, low_res_size[2::],spline_order) return low_res_image def individual_parameters_to_model_parameters(ind_pars): model_pars =", "centered identity map (shifted so it is centered around 0)", "v = v.permute(0,2,3,1) if dim ==3: v = v.permute(0,2,3,4,1) return", "else: raise ValueError('Only dimensions 1-3 are currently supported for the", "# else: # new_var_list = var_list # return new_var_list #", "1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_xavier(m): classname = m.__class__.__name__ #", "\"\"\" dim = len(mu) if dim == 1: g =", "already be in the right format model_pars = ind_pars else:", "import SplineInterpolation_ND_BCXYZ import os try: from .libraries.functions.nn_interpolation import get_nn_interpolation except", "spacing,dtype=dtype) return id def identity_map_multiN(sz,spacing,dtype='float32'): \"\"\" Create an identity map", "bias=False): super(ConvBnRel, self).__init__() padding = int((kernel_size - 1) // 2)", "if classname.find('Conv') != -1: space_normal(m.weight.data) elif classname.find('Linear') != -1: space_normal(m.weight.data)", "self.bn(x) if self.active_unit is not None: x = self.active_unit(x) return", "the sz is odd Otherwise shifts everything by 0.5*spacing :param", "= id[1] elif dim==3: idnp = np.zeros([3,sz[0], sz[1], sz[2]], dtype=dtype)", "spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif dim == 3: return _compute_warped_image_multiNC_3d(I0, phi, spacing,", "# return 
omt_const.view(*sz) def get_single_gaussian_smoother(gaussian_std,sz,spacing): s_m_params = pars.ParameterDict() s_m_params['smoother']['type'] =", "new_var_list # def identity_map(sz,spacing,dtype='float32'): \"\"\" Returns an identity map. :param", "the column vector Ab to the identity transform. :param Ab:", "size of an image in BxCxXxYxZ format :param spacing: list", "= id[1] * spacing_ratio[1] # idnp[2, :, :, :] =", "batch size). :param Ab: Parameter vectors B x pars (batch", "I.shape[2:] spacing = 1. / (np.array(img_sz) - 1) if identity_map", "'difference': input = moving-target return input def bh(m,gi,go): print(\"Grad Input\")", ":param phi: map; format nrCxXxYxZ (nrC corresponds to dimension) :return:", "= centered_identity_map(sz[2::], spacing,dtype=dtype) return id def identity_map_multiN(sz,spacing,dtype='float32'): \"\"\" Create an", "m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: init.normal(m.weight.data) elif classname.find('Linear')", "pars (batch size x param. vector) :return: Inverse of affine", "a dictionary assume that they come from the optimizer #", ">= 1): print('WARNING: Could not compute low_res_size as factor was", "def weights_init_orthogonal(m): classname = m.__class__.__name__ print(classname) if classname.find('Conv') != -1:", "torch.nn.init as init from . import module_parameters as pars from", "map (shifted so it is centered around 0) :param sz:", "as nn import torch.nn.init as init from . 
import module_parameters", "parameter vector :return: dimensionality of transform (1,2,or 3) \"\"\" nr", "[1,1]+ list(img_sz) mask = AdaptVal(torch.ones(*mask_sz))*mask_value if dim ==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1 elif", "Input\") print((torch.sum(gi[0].data), torch.sum(gi[1].data))) print(\"Grad Output\") print(torch.sum(go[0].data)) return gi[0], gi[1], gi[2]", "warped image of size BxCxXxYxZ \"\"\" dim = I0.dim()-2 if", "torch from torch.nn.parameter import Parameter from torch.autograd import Variable from", "transforms') phiR = MyTensor(sz).zero_().type_as(phi) for nrI in range(nr_of_images): phiR[nrI, ...]", "sm.smooth(mask) return mask.detach() def momentum_boundary_weight_mask(img_sz,spacing,mask_range=5,smoother_std =0.05,pow=2): \"\"\"generate a smooth weight", "classname = m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: init.normal(m.weight.data)", "over all the channels and add the results m =", "import torch.nn as nn import torch.nn.init as init from .", "param['forward_model']['smoother']['omt_weight_penalty'] # min_std = torch.min(stds) # max_std = torch.max(stds) #", "and 'model_params' for par in ind_pars: model_pars[par['name']] = par['model_params'] return", "= (np.ceil((np.array(sz[2:]) * np.array(factor)))).astype('int16') if lowResSize[-1] % 2 != 0:", "new_s, sched='depth_concat') elif self.net_sched == 'm_f_s_t': input = organize_data(m, self.s,", "lift_to_dimension(A, dim): \"\"\"Creates a view of A of dimension dim", "return torch.clamp(v, min=(np.asscalar(np.finfo('float16').min))/reduction_factor, max=(np.asscalar(np.finfo('float16').max))/reduction_factor) else: raise ValueError('Unknown data type: '", "MyTensor(*(csz.tolist())).normal_(0.,1e-7) def create_vector_parameter(nr_of_elements): \"\"\"Creates a vector parameters with a specified", "return new_var_list # def identity_map(sz,spacing,dtype='float32'): \"\"\" Returns an identity map.", "self.fc = nn.Linear(in_features, 
out_features) if active_unit == 'relu': self.active_unit =", "Could not compute low_res_size as factor was ' + str(factor))", "in 3D) :param nrOfI: number of images :return: returns vector", "tensor[:, :,:, :, :, -1] = tensor[:, :, :, -2]", "by creating a different view (effectively adding dimensions) Iw =", "middle) if the sz is odd # Otherwise shifts everything", "tuple of (parameter_list, name_dictionary) \"\"\" par_to_name_dict = dict() pl =", "phi.size() dim = get_dim_of_affine_transform(Ab[0,:]) nr_of_images = Ab.size()[0] if nr_of_images !=", "nrOfI, sz, spacing): \"\"\"Computes the vector momentum from the scalar", "and returns it as a numpy array on the cpu", "image to a given desired size :param I: Input image", "else: raise ValueError('Can only compute Gaussians in dimensions 1-3') def", "sig: array indicating the standard deviations for the different dimensions", "i.e., all columns stacked on top of each other. :param", "array :return: numpy array \"\"\" return (v.detach()).cpu().numpy() def cxyz_to_xyzc( v", "beta #When affine=False the output of BatchNorm is equivalent to", "self.active_unit(x) return x class FcRel(nn.Module): # fc+ relu(option) def __init__(self,", "Ab[8] * phi[2, ...] + Ab[11] else: raise ValueError('Only supports", "low-res size from a (high-res) sz :param sz: size (high-res)", "\"\"\" Create vector field torch Variable of given size :param", "dim = 2 elif Ab.shape[1] == 12: dim = 3", "d.update(d2) return d def get_parameter_list_from_parameter_dict(pd): \"\"\"Takes a dictionary which contains", "x: numpy array :return: True if NaNs are present, False", "(np.array(sz[2::]) - 1) / (np.array(lowResSize[2::]) - 1) ########################################## Adaptive Net", "- mean[x]}{ \\sqrt{Var[x] + \\epsilon}} * gamma + beta #When", "gamma=1 and beta=0 as constants. 
self.bn = nn.BatchNorm2d(out_channels, eps=0.0001, momentum=0,", "omt_boundary_weight_mask(img_sz,spacing,mask_range=5,mask_value=5,smoother_std =0.05): \"\"\"generate a smooth weight mask for the omt", "a given desired size :param I: Input image (expected to", "nrOfI=1, nrOfC=1): \"\"\" Create vector field torch Parameter of given", "of image :param lowResSize: size of low re parameterization :return:", "(high-res) sz :param sz: size (high-res) :param factor: low-res factor", "= np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]] else: raise ValueError('Only dimensions 1-3 are", "= I.shape[2:] spacing = 1. / (np.array(img_sz) - 1) if", "= AdaptVal(torch.Tensor(spacing_ratio)) # sp_sz = [1]+[dim] +[1]*dim # spacing_ratio_t =", "mu[0], 2.) / (2 * np.power(sig[0], 2.)) -np.power(X[1,:, :, :]", "standard deviations for the different dimensions :return: Normalized Gaussian evaluated", "nr_of_images = sz[0] for nrI in range(nr_of_images): set_affine_transform_to_identity(Ab[nrI, :]) def", "reduction_factor = np.prod(np.array(sz)) condition = True if type(v.data) == torch.cuda.FloatTensor", "the number of channels is the same nrOfI = sz[0]", "Create an identity map :param sz: size of an image", "else: # #odd # id[d] -= spacing[d]*((sz[d]+1)//2) # # #", "* phi[0, ...] + Ab[3] * phi[1, ...] + Ab[6]", "NaNs. :param x: numpy array :return: True if NaNs are", "my_hasnan(x): \"\"\"Check if any input elements are NaNs. 
:param x:", "in the middle) if the sz is odd Otherwise shifts", "np.mgrid[0:sz[0]] elif dim==2: id = np.mgrid[0:sz[0],0:sz[1]] elif dim==3: id =", "should be list of Variable \"\"\" return [len(np.argwhere(np.isnan(elem.detach().cpu().numpy()))) for elem", "I: B C X Y Z :param spacing: spx spy", "list): lowResSize[2::] = (np.ceil((np.array(sz[2:]) * factor))).astype('int16') else: lowResSize[2::] = (np.ceil((np.array(sz[2:])", "1: id = np.zeros([nrOfI, 1, sz[2]], dtype=dtype) elif dim ==", "idnp = np.zeros([3, sz[0], sz[1], sz[2]], dtype=dtype) # idnp[0, :,", "set the default if sched =='w_K_w': gaussian_std_weights = [torch.sqrt(std_w) for", "else: raise ValueError('Unknown data type: ' + str( type(v.data))) def", "n in range(Ab.shape[0]): tm_inv = torch.inverse(Ab[n, :, :dim]) Ab_inv[n, :,", "def cxyz_to_xyzc( v ): \"\"\" Takes a torch array and", "for new identity map code') raise ValueError('Double check the spacing", "\"\"\"Check if any input elements are NaNs. :param x: numpy", "(high-res) sz. :param sz: size (high-res) :param factor: low-res factor", "simple conv. implementation, generate displacement field \"\"\" def __init__(self, inputs,", "[nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched =='m_f_s': if debugging: self.net", "each other. :param Ab: parameter vector :return: dimensionality of transform", "input = organize_data(input, self.t, sched='depth_concat') return input def forward(self, m,new_s=None):", "# todo: maybe find a cleaner way of handling this", ":param lam: scalar momentum, BxCxXxYxZ :param I: image, BxCxXxYxZ :param", "used as an input to an optimizer. 
:param pd: parameter", "dim==2: id = np.mgrid[0:sz[0],0:sz[1]] elif dim==3: id = np.mgrid[0:sz[0],0:sz[1],0:sz[2]] else:", "nn.Sequential(*net) elif self.net_sched == 'm_d_s': if debugging: self.net = nn.Conv2d(self.dim+1,", "the affine transforms to the identity (in the case of", "module_parameters as pars from .spline_interpolation import SplineInterpolation_ND_BCXYZ import os try:", "already the same file return else: os.remove(abs_t_with_right_ext) # now we", "= get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask = sm.smooth(mask) if pow ==2: mask =", "self.net_input padding_size = (kernel_size-1)//2 if self.net_sched == 'm_only': if debugging:", "Ab = Ab.view(Ab.shape[0], dim+1, dim).transpose(1, 2) Cd = Cd.view(Cd.shape[0], dim+1,", "= m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: init.normal(m.weight.data) elif", "omt_const.view(*sz) def get_single_gaussian_smoother(gaussian_std,sz,spacing): s_m_params = pars.ParameterDict() s_m_params['smoother']['type'] = 'gaussian' s_m_params['smoother']['gaussian_std']", "symmetric training if I.shape[0] != identity_map.shape[0]: n_batch = I.shape[0] desiredSize", "1, 2, and 3.') Ab = Ab.view(Ab.shape[0], dim+1, dim).transpose(1,2) Ab_inv", "in a dim+1 array if dim == 1: idnp =", "np.array([nrOfI,nr_of_mg_weights]+list(csz)) weights = torch.empty(*csz) # set the default if sched", "# if do_transform: # min_spacing = np.min(spacing) # spacing_ratio =min_spacing/spacing", "of doing it return spacing * (np.array(sz[2::]) - 1) /", "==3: mask = mask*mask*mask return mask # def compute_omt_const(stds,param,dim): #", "optimizer # (i.e., list and each list element has a", "them') if current_dim == dim: return A else: return A.reshape([1]*(dim-current_dim)+list(A.shape))", "momentum=0, affine=True) if bn else None if active_unit == 'relu':", "= fdt.dXc(I)*lam elif dim == 2: m[:, 0, :, :]", "\"\"\"Returns the corresponding low-res size from a (high-res) sz. 
:param", "return spacing * (np.array(sz[2::]) - 1) / (np.array(lowResSize[2::]) - 1)", "handling this # this is to make sure that subsequent", "a dictionary which contains key value pairs for model parameters", "\"\"\"Create vector field torch Variable of given size. :param sz:", "2.)) -np.power(X[1,:, :, :] - mu[1], 2.) / (2 *", "low_res_size, spline_order): import mermaid.image_sampling as IS sampler = IS.ResampleImage() low_res_image,", "def set_affine_transform_to_identity_multiN(Ab): \"\"\"Set the affine transforms to the identity (in", "elif classname.find('Linear') != -1: init.uniform(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm2d') !=", "= self.active_unit(x) return x class FcRel(nn.Module): # fc+ relu(option) def", "...] + Ab[10] phiR[2, ...] = Ab[2] * phi[0, ...]", "right format model_pars = ind_pars else: # if ind_pars is", "import mermaid.image_sampling as IS sampler = IS.ResampleImage() low_res_image, _ =", "of dimension dimxXxYxZ \"\"\" dim = len(sz) if dim ==", "# print(classname) if classname.find('Conv') != -1: init.xavier_normal(m.weight.data, gain=1) elif classname.find('Linear')", "of size XxYxZ \"\"\" # implements this by creating a", "spacing, dtype='float32'): \"\"\" Create a centered identity map (shifted so", "+ list(I0.size()))), phi.view(torch.Size([1] + list(phi.size()))), spacing, spline_order, zero_boundary, use_01_input) return", "= 'm_only' self.s = inputs['s'].detach() self.t = inputs['t'].detach() self.mask =", "mask.detach() def momentum_boundary_weight_mask(img_sz,spacing,mask_range=5,smoother_std =0.05,pow=2): \"\"\"generate a smooth weight mask for", "= organize_data(input, self.t, sched='depth_concat') elif self.net_sched == 'm_d_s_f_t': input =", "= id[0] elif dim==2: idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype)", "\"\"\" def __init__(self, inputs, dim, net_sched=None): # settings should include", "currently supported for the centered identity map') return idnp #", "else: net = \\ [ConvBnRel(self.dim, 20, 5, 
active_unit=active_unit, same_padding=True, bn=using_bn),", ":, :] = fdt.dXc(I)*lam m[:, 1, :, :] = fdt.dYc(I)*lam", "rescale by the ratio # if dim == 1: #", "2, and 3.') return phiR def apply_affine_transform_to_map_multiNC(Ab,phi): \"\"\"Applies an affine", "ValueError('Can only convert scalar to vector momentum in dimensions 1-3')", "centered_identity_map(sz, spacing, dtype='float32'): \"\"\" Returns a centered identity map (with", "= len(mu) if dim == 1: g = np.exp(-np.power(X[0, :]", "1: id = np.zeros([nrOfI,1,sz[2]],dtype=dtype) elif dim == 2: id =", "= nn.Conv2d(self.dim+2, self.dim, kernel_size, 1, padding=padding_size, bias=False) else: net =", "1 elif nr==6: return 2 elif nr==12: return 3 else:", "the case of arbitrary batch size). :param Ab: Parameter vectors", "np.exp(-np.power(X[0,:, :, :] - mu[0], 2.) / (2 * np.power(sig[0],", "\"\"\" # implements this by creating a different view (effectively", "factor (needs to be <1) :return: low res size \"\"\"", ":return: returns transformed map \"\"\" sz = phi.size() dim =", "# return get_warped_label_map(I0,phi,spacing) stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=False, use_01_input=use_01_input) elif", "identity map of dimension dimxXxYxZ # \"\"\" # dim =", "moment). ' 'Some functionality may not be available.') def my_hasnan(x):", "###################################################3 def space_normal(tensors, std=0.1): \"\"\" space normalize for the net", "= sz[0] if dim == 1: id = np.zeros([nrOfI, 1,", "3: m[:, 0, :, :, :] = fdt.dXc(I)*lam m[:, 1,", "with the identity transform) :return: \"\"\" dim = get_dim_of_affine_transform(Ab) if", "for n in range(nrOfI): id[n, ...] = centered_identity_map(sz[2::], spacing,dtype=dtype) return", "idnp[0, :] = id[0] * spacing_ratio[0] # elif dim ==", "# implements this by creating a different view (effectively adding", "(high-res) :param factor: low-res factor (needs to be <1) :return:", "the identity map') for n in range(nrOfI): id[n, ...] 
=", "3.') phiR = MyTensor(sz).zero_().type_as(phi) if dim == 1: phiR =", "identity map') # # min_spacing = np.min(spacing) # spacing_ratio =", "the number of dimensions corresponding to an affine transformation of", "= id[0] idnp[1,:, :] = id[1] elif dim==3: idnp =", "a tuple: the downsampled image, the new spacing after downsampling", "else: tmp = Parameter(tmp) return tmp def create_ND_scalar_field_parameter_multiNC(sz, nrOfI=1, nrOfC=1):", "return input def bh(m,gi,go): print(\"Grad Input\") print((torch.sum(gi[0].data), torch.sum(gi[1].data))) print(\"Grad Output\")", "meaningful way. \"\"\" from __future__ import print_function from __future__ import", "#id[d]*=2./(sz[d]-1) #id[d]-=1. # and now store it in a dim+1", "+= [nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched == 'm_d_s_f_t': if", "+ Ab[4] * phi[1, ...] + Ab[7] * phi[2, ...]", "id[d] -= spacing[d]*((sz[d]+1)//2) # and now store it in a", "input = torch.cat([moving, target], dim=1) elif sched == 'width_concat': input", "super(ConvBnRel, self).__init__() padding = int((kernel_size - 1) // 2) if", "# should already be in the right format model_pars =", "...] + Ab[4] * phi[1, ...] + Ab[7] * phi[2,", "(1,2,or 3) \"\"\" nr = len(Ab) if nr==2: return 1", "pars.ParameterDict() s_m_params['smoother']['type'] = 'gaussian' s_m_params['smoother']['gaussian_std'] = gaussian_std s_m = sf.SmootherFactory(sz,", "which keeps track of the keys based on memory id.", "adding dimensions) Iw = compute_warped_image_multiNC(I0.view(torch.Size([1, 1] + list(I0.size()))), phi.view(torch.Size([1] +", "i.e., XxYxZ :param spacing: list with spacing information [sx,sy,sz] :param", "of BatchNorm is equivalent to considering gamma=1 and beta=0 as", "('float32', 'float64', ...) 
:return: returns the identity map of dimension", "= inputs['s'].detach() self.t = inputs['t'].detach() self.mask = Parameter(torch.cat([torch.ones(inputs['s'].size())]*dim, 1), requires_grad", "return m def compute_vector_momentum_from_scalar_momentum_multiN(lam, I, nrOfI, sz, spacing): \"\"\"Computes the", "import AdaptVal from .data_wrapper import MyTensor from . import smoother_factory", "5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20, self.dim, 5, active_unit=active_unit, same_padding=True, bn=using_bn)]", "1-3 are currently supported for the identity map') return idnp", "ValueError('Only supports dimensions 1, 2, and 3.') def set_affine_transform_to_identity_multiN(Ab): \"\"\"Set", "tensor[:, :, -1,:] = tensor[:, :,-2,:] + tensor[:, :,-2,:] -", "== 1: # idnp = np.zeros([1, sz[0]], dtype=dtype) # idnp[0,", "tensors[n][c].dim() sz = tensors[n][c].size() mus = np.zeros(dim) stds = std", "Gaussians in dimensions 1-3') def _compute_warped_image_multiNC_1d(I0, phi, spacing, spline_order, zero_boundary=False,", "in range(dim): # id[d] *= spacing[d] # if sz[d]%2==0: #", "or type(v.data) == torch.cuda.HalfTensor: return torch.clamp(v, min=(np.asscalar(np.finfo('float16').min))/reduction_factor, max=(np.asscalar(np.finfo('float16').max))/reduction_factor) else: raise", ":dim] = tm_inv Ab_inv[n, :, dim] = - torch.matmul(tm_inv, Ab[n,:,dim])", "name_dictionary) \"\"\" par_to_name_dict = dict() pl = [] for key", "v.data.dtype == torch.HalfTensor or type(v.data) == torch.cuda.HalfTensor: return torch.clamp(v, min=(np.asscalar(np.finfo('float16').min))/reduction_factor,", "== 1: return _compute_warped_image_multiNC_1d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif dim ==", "Create vector field torch Variable of given size :param sz:", "return Parameter(MyTensor(nr_of_elements).normal_(0., 1e-7)) def create_ND_vector_field_parameter_multiN(sz, nrOfI=1,get_field_from_external_network=False): \"\"\"Create vector field 
torch", "if dim == 1: idnp = np.zeros([1, sz[0]], dtype=dtype) idnp[0,", "ratio # if dim == 1: # idnp = np.zeros([1,", "0.0, 0.02) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data,", ":, -3, :] tensor[:, :,:, :, -1, :] = tensor[:,", "pars (batch size x param. vector); will be overwritten with", "images :return: returns vector field of size nrOfIxdimxXxYxZ \"\"\" nr_of_mg_weights", "def interoplate_boundary_right(tensor): dim = len(tensor.shape)-2 if dim==1: tensor[:,:,-1]= tensor[:,:-2]+ tensor[:,:-2]-tensor[:,:-3]", ":param x: numpy array :return: True if NaNs are present,", "or type(v.data) == torch.cuda.DoubleTensor: return torch.clamp(v, min=(np.asscalar(np.finfo('float64').min))/reduction_factor, max=(np.asscalar(np.finfo('float64').max))/reduction_factor) elif v.data.dtype", "available.') def my_hasnan(x): \"\"\"Check if any input elements are NaNs.", "the corresponding low-res size from a (high-res) sz. :param sz:", "classname.find('Linear') != -1: space_normal(m.weight.data) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0,", "\"\"\"Create vector field torch Parameter of given size. :param sz:", "spacing, spline_order,zero_boundary,use_01_input=use_01_input) else: raise ValueError('Images can only be warped in", "smoother_factory as sf from .data_wrapper import USE_CUDA import numpy as", "zero_boundary=False, use_01_input=True): \"\"\"Warps image. 
:param I0: image to warp, image", "model_pars = dict() if type(ind_pars) == type(dict()): # should already", "compute_vector_momentum_from_scalar_momentum_multiN(lam[:, c, ...], I[:, c, ...], nrOfI, sz[2::], spacing) return", "remove_infs_from_variable(v): # 32 - bit floating point: torch.FloatTensor, torch.cuda.FloatTensor #", "list element has a dictionary with keys 'name' and 'model_params'", "== 'orthogonal': net.apply(weights_init_orthogonal) else: raise NotImplementedError('initialization method [%s] is not", "id[1] * spacing_ratio[1] # elif dim == 3: # idnp", "[1,1] >>> X = [0,0] >>> print(compute_normalized_gaussian(X, mu, sig) \"\"\"", "I: image, BxCxXxYxZ :param sz: size of image :param spacing:", "forward(self, x): x = self.conv(x) if self.bn is not None:", "# loop over all the channels and add the results", "for nrI in range(nr_of_images): set_affine_transform_to_identity(Ab[nrI, :]) def get_inverse_affine_param(Ab): \"\"\"Computes inverse", "return idnp # # def tranfrom_var_list_into_min_normalized_space(var_list,spacing,do_transform=True): # if do_transform: #", "print(\"Grad Output\") print(torch.sum(go[0].data)) return gi[0], gi[1], gi[2] class ConvBnRel(nn.Module): #", "return phiR def compute_normalized_gaussian(X, mu, sig): \"\"\"Computes a normalized Gaussian.", "in [1, 2, 3]: raise ValueError('Only supports dimensions 1, 2,", "AdaptVal(torch.zeros(*mask_sz)) if dim ==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1 elif dim==3: mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range ]=1 sm", "centered_id = centered_identity_map(sz,spacing) g = compute_normalized_gaussian(centered_id, mus, stds) tensors[n,c] =", "= None def forward(self, x): x = self.conv(x) if self.bn", "return torch.clamp(v, min=(np.asscalar(np.finfo('float32').min))/reduction_factor, max=(np.asscalar(np.finfo('float32').max))/reduction_factor) elif v.data.dtype == torch.DoubleTensor or type(v.data)", "2, kernel_size, 1, padding=padding_size, 
bias=False,groups=2) else: net = \\ [ConvBnRel(self.dim,", "for the low-res parameterization from image spacing :param spacing: image", "dim=3) elif sched == 'list_concat': input = torch.cat((moving.unsqueeze(0),target.unsqueeze(0)),dim=0) elif sched", "identity_map: :return: \"\"\" if spacing is None: img_sz = I.shape[2:]", "1: id = np.mgrid[0:sz[0]] elif dim == 2: id =", "elif self.net_sched == 'm_f_s_t': if debugging: self.net = nn.Conv2d(self.dim+2, self.dim,", "sched='depth_concat') input = organize_data(input, self.t, sched='depth_concat') elif self.net_sched == 'm_d_s_f_t':", "= id[0] idnp[1,:, :, :] = id[1] idnp[2,:, :, :]", "= organize_data(input, self.t, sched='depth_concat') elif self.net_sched == 'm_f_s_t': input =", "is equivalent to considering gamma=1 and beta=0 as constants. self.bn", "parameter dictionary :return: list of parameters \"\"\" pl = []", "= torch.zeros_like(Ab) for n in range(Ab.shape[0]): tm_inv = torch.inverse(Ab[n, :,", "nrI in range(nr_of_images): phiR[nrI, ...] = apply_affine_transform_to_map(Ab[nrI, :], phi[nrI, ...])", "id[0] idnp[1,:, :, :] = id[1] idnp[2,:, :, :] =", ":, :] - mu[0], 2.) / (2 * np.power(sig[0], 2.))", "m[:, 2, :, :, :] = fdt.dZc(I)*lam else: raise ValueError('Can", "number of channels :return: returns vector field of size nrOfIxnrOfCxXxYxZ", "# add a dummy first index for d in range(dim):", "running torch.cuda.synchronize() print(start.elapsed_time(end)) return output return __time_warped_function def interoplate_boundary_right(tensor): dim", "are currently supported for the centered identity map') return idnp", "now store it in a dim+1 array if dim ==", "= apply_affine_transform_to_map(Ab[nrI, :], phi[nrI, ...]) return phiR def compute_normalized_gaussian(X, mu,", "size from a (high-res) sz. 
:param sz: size (high-res) :param", "dictionary with keys 'name' and 'model_params' for par in ind_pars:", "it into a list of parameters that can be used", "= identity_map[:n_batch] resampled, new_spacing = resample_image(I, spacing, desiredSize, spline_order=spline_order, zero_boundary=zero_boundary,", "sched == 'list_concat': input = torch.cat((moving.unsqueeze(0),target.unsqueeze(0)),dim=0) elif sched == 'difference':", "dim == 3: id = np.zeros([nrOfI,3,sz[2],sz[3],sz[4]],dtype=dtype) else: raise ValueError('Only dimensions", "range(tensors.size()[0]): for c in range(tensors.size()[1]): dim = tensors[n][c].dim() sz =", "desiredSize = desiredSize.copy() desiredSize[0] = n_batch identity_map = identity_map[:n_batch] resampled,", "sz[2]], dtype=dtype) idnp[0,:, :, :] = id[0] idnp[1,:, :, :]", "mu[1], 2.) / (2 * np.power(sig[1], 2.)) -np.power(X[2,:, :, :]", "get_dim_of_affine_transform(Ab): \"\"\"Returns the number of dimensions corresponding to an affine", "returns the vector momentum \"\"\" fdt = fd.FD_torch(spacing) dim =", "deviations for the different dimensions :return: Normalized Gaussian evaluated at", "return tensors for n in range(tensors.size()[0]): for c in range(tensors.size()[1]):", "<1) :return: low res size \"\"\" if (factor is None)", "= 1 elif Ab.shape[1] == 6: dim = 2 elif", "model_pars[par['name']] = par['model_params'] return model_pars def compute_vector_momentum_from_scalar_momentum_multiNC(lam, I, sz, spacing):", "def momentum_boundary_weight_mask(img_sz,spacing,mask_range=5,smoother_std =0.05,pow=2): \"\"\"generate a smooth weight mask for the", "nr==6: return 2 elif nr==12: return 3 else: raise ValueError('Only", "1: m[:, 0, :] = fdt.dXc(I)*lam elif dim == 2:", "create_vector_parameter(nr_of_elements): \"\"\"Creates a vector parameters with a specified number of", "c in range(tensors.size()[1]): dim = tensors[n][c].dim() sz = tensors[n][c].size() mus", "spz :param desiredSize: B C X Y Z :param spline_order:", "if same_padding else 0 if not 
reverse: self.conv = nn.Conv2d(in_channels,", "else 0 if not reverse: self.conv = nn.Conv2d(in_channels, out_channels, kernel_size,", "data-type ('float32', 'float64', ...) :return: returns the identity map of", "# 32 - bit floating point: torch.FloatTensor, torch.cuda.FloatTensor # 64", "padding = int((kernel_size - 1) // 2) if same_padding else", "1. / (np.array(img_sz) - 1) if identity_map is not None:", "import module_parameters as pars from .spline_interpolation import SplineInterpolation_ND_BCXYZ import os", "dimensions 1-3') def _compute_warped_image_multiNC_1d(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True): if", "phi[1, ...] + Ab[8] * phi[2, ...] + Ab[11] else:", "len(sz) - 2 nrOfI = sz[0] if dim == 1:", "!= -1: init.uniform(m.weight.data, 0.038, 0.042) elif classname.find('Linear') != -1: init.uniform(m.weight.data,", "idnp[0, :, :] = id[0] * spacing_ratio[0] # idnp[1, :,", "column vector :param phi: map; format nrCxXxYxZ (nrC corresponds to", "# Waits for everything to finish running torch.cuda.synchronize() print(start.elapsed_time(end)) return", "elif self.net_sched == 'm_d_s': if debugging: self.net = nn.Conv2d(self.dim+1, self.dim,", "# else: # raise ValueError('Only dimensions 1-3 are currently supported", "not isinstance(I, torch.Tensor): I = torch.Tensor(I) is_numpy = True sz", "appropriate dimension \"\"\" current_dim = len(A.shape) if current_dim > dim:", "to the identity (in the case of arbitrary batch size).", "\\frac{x - mean[x]}{ \\sqrt{Var[x] + \\epsilon}} * gamma + beta", "parameter dictionary :return: tuple of (parameter_list, name_dictionary) \"\"\" par_to_name_dict =", "pd: pl.append(pd[key]) return pl def get_parameter_list_and_par_to_name_dict_from_parameter_dict(pd): \"\"\"Same as get_parameter_list_from_parameter_dict; but", "# :return: returns the identity map of dimension dimxXxYxZ #", "do_transform: # min_spacing = np.min(spacing) # spacing_ratio =min_spacing/spacing # dim", "reverse: self.conv = 
nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding=padding, bias=bias) else:", "########################################### if identity_map is not None: idDes = identity_map else:", "\"\"\" sz = phi.size() dim = len(sz) - 1 if", "= nn.Sequential(*net) elif self.net_sched == 'm_d_s_f_t': if debugging: self.net =", "torch.zeros_like(Ab) for n in range(Ab.shape[0]): tm_param = torch.matmul(Cd[n,:,:dim],Ab[n,:,:dim]) updated_param[n,:,:dim] =", "dict() pl = [] for key in pd: pl.append(pd[key]) par_to_name_dict[pd[key]]", "v.data.dtype==torch.float32: return torch.clamp(v, min=(np.asscalar(np.finfo('float32').min))/reduction_factor, max=(np.asscalar(np.finfo('float32').max))/reduction_factor) elif v.data.dtype == torch.DoubleTensor or", "sz = phi.size() dim = len(sz) - 1 if dim", "stn(I0, phi) return I1_warped def compute_warped_image(I0, phi, spacing, spline_order, zero_boundary=False,", "nrOfC=1): \"\"\" Create vector field torch Parameter of given size", "map of dimension dimxXxYxZ \"\"\" dim = len(sz) if dim==1:", ":return: returns spacing of low res parameterization \"\"\" #todo: check", "transform later!\\n\\n') return lowResSize def get_res_spacing_from_spacing(spacing, sz, lowResSize): \"\"\" Computes", "# # and now store it in a dim+1 array", "!= -1: space_normal(m.weight.data) elif classname.find('Linear') != -1: space_normal(m.weight.data) elif classname.find('BatchNorm2d')", "0.0) def init_weights(net, init_type='normal'): print('initialization method [%s]' % init_type) if", "size dimxXxYxZ \"\"\" dim = len(sz) csz = np.array(sz) #", "considering gamma=1 and beta=0 as constants. 
self.bn = nn.BatchNorm2d(out_channels, eps=0.0001,", "# settings should include [using_bias, using bn, using elu] #", "indicating the mean :param sig: array indicating the standard deviations", "= organize_data(m,self.s,sched='depth_concat') elif self.net_sched == 'm_d_s': input = organize_data(m, new_s,", "= - torch.matmul(tm_inv, Ab[n,:,dim]) inv_affine_param = Ab_inv.transpose(1, 2).contiguous().view(Ab.shape[0], -1) return", ":] = id[1] * spacing_ratio[1] # idnp[2, :, :, :]", "if I.shape[0] != identity_map.shape[0]: n_batch = I.shape[0] desiredSize = desiredSize.copy()", "warping, size dimxXxYxZ :param spacing: image spacing [dx,dy,dz] :return: returns", "self.net_sched == 'm_d_s': input = organize_data(m, new_s, sched='depth_concat') elif self.net_sched", "builtins import str # from builtins import range import torch", "compute_normalized_gaussian(centered_id, mus, stds) tensors[n,c] = torch.from_numpy(g) def weights_init_uniform(m): classname =", "1, padding=padding_size, bias=False,groups=2) else: net = \\ [ConvBnRel(self.dim, 20, 5,", "cpu :param v: torch array :return: numpy array \"\"\" dim", "tmp.requires_grad = True else: tmp = Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) return tmp def", "# 64 - bit floating point: torch.DoubleTensor, torch.cuda.DoubleTensor # 16", "bit floating point: torch.FloatTensor, torch.cuda.FloatTensor # 64 - bit floating", "g = g/g.sum() return g elif dim == 3: g", "compute_vector_momentum_from_scalar_momentum_multiNC(lam, I, sz, spacing): \"\"\"Computes the vector momentum from the", "number of channels is the same nrOfI = sz[0] nrOfC", "2.))) g = g / g.sum() return g else: raise", "g/g.sum() return g elif dim == 3: g = np.exp(-np.power(X[0,:,", "[dx,dy,dz] :return: returns the warped image of size BxCxXxYxZ \"\"\"", "isinstance(factor, list): lowResSize[2::] = (np.ceil((np.array(sz[2:]) * factor))).astype('int16') else: lowResSize[2::] =", "= nn.ELU(inplace=True) else: self.active_unit = None def forward(self, x): 
x", "np.prod(np.array(sz)) condition = True if type(v.data) == torch.cuda.FloatTensor or v.data.dtype==torch.float32:", "2) Cd = Cd.view(Cd.shape[0], dim+1, dim).transpose(1, 2) updated_param = torch.zeros_like(Ab)", ":return: returns the vector momentum \"\"\" nrOfI = sz[0] #", "self.dim, 5, active_unit=active_unit, same_padding=True, bn=using_bn)] if using_sigmoid: net += [nn.Sigmoid()]", "idnp[2, :, :, :] = id[2] * spacing_ratio[2] # else:", "= SplineInterpolation_ND_BCXYZ(spacing, spline_order) I1_warped = stn(I0, phi) return I1_warped def", "of channels :return: returns vector field of size nrOfIxnrOfCxXxYxZ \"\"\"", "- 1.)) ########################################### if identity_map is not None: idDes =", "space_normal(tensors, std=0.1): \"\"\" space normalize for the net kernel :param", "+ 1, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20, self.dim, 5,", ":, -1, :] = tensor[:, :, :, -2] + tensor[:,", "training if I.shape[0] != identity_map.shape[0]: n_batch = I.shape[0] desiredSize =", "tensor[:, :, -2, :] - tensor[:, :, -3, :] tensor[:,", "raise ValueError('Double check the spacing here before running this code')", "do_transform: # min_spacing = np.min(spacing) # spacing_ratio =spacing/min_spacing # dim", "% init_type) def organize_data(moving, target, sched='depth_concat'): if sched == 'depth_concat':", "mode='fan_in') elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0)", "image size XxYxZ :param phi: map for the warping, size", "For A =[a1,a2,a3], the parameter vector is simply [a1;a2;a3;b], i.e.,", "range(tensors.size()[1]): dim = tensors[n][c].dim() sz = tensors[n][c].size() mus = np.zeros(dim)", "+ 2, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20, self.dim, 5,", "if current_dim > dim: raise ValueError('Can only add dimensions, but", "\"\"\" csz = np.array(sz) # just to make sure it", "3.') def 
set_affine_transform_to_identity_multiN(Ab): \"\"\"Set the affine transforms to the identity", "as np from . import finite_differences as fd import torch.nn", ":return: \"\"\" sz = Ab.size() nr_of_images = sz[0] for nrI", "of images :param nrOfC: number of channels :return: returns vector", "self.net_sched =='m_f_s': if debugging: self.net = nn.Conv2d(self.dim+1, self.dim, kernel_size, 1,", "id = np.zeros([nrOfI, 1, sz[2]], dtype=dtype) elif dim == 2:", "-2] - tensor[:, :, :, -3] def get_resampled_image(I, spacing, desiredSize,", "implemented' % init_type) def organize_data(moving, target, sched='depth_concat'): if sched ==", "Variable from .libraries.modules.stn_nd import STN_ND_BCXYZ from .data_wrapper import AdaptVal from", "array for the desired size (excluding B and C, i.e,", "\"\"\"Returns the number of dimensions corresponding to an affine transformation", "self.dim = dim self.net_sched = 'm_only' self.s = inputs['s'].detach() self.t", "spacing_ratio[1] # idnp[2, :, :, :] = id[2] * spacing_ratio[2]", ":param Ab: Parameter vectors B x pars (batch size x", ":return: returns the warped image of size XxYxZ \"\"\" #", "a dummy first index for d in range(dim): id[d] *=", "def init_weights(net, init_type='normal'): print('initialization method [%s]' % init_type) if init_type", "tmp = Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) return tmp def create_local_filter_weights_parameter_multiN(sz,gaussian_std_weights, nrOfI=1,sched='w_K_w',get_preweight_from_network=False): \"\"\" Create", "identity map') return idnp # # def centered_min_normalized_identity_map(sz, spacing, dtype='float32'):", "= fd.FD_torch(spacing) dim = len(sz) m = create_ND_vector_field_variable_multiN(sz, nrOfI) if", "if nr==2: return 1 elif nr==6: return 2 elif nr==12:", ":param sz: size of image :param spacing: spacing of image", "+ str(factor)) return np.array(sz) else: low_res_sz = np.array(sz) low_res_sz[2::] =", "sig) \"\"\" dim = len(mu) if dim == 1: g", "if dim==1: id = 
id.reshape(1,sz[0]) # add a dummy first", "if spacing is None: img_sz = I.shape[2:] spacing = 1.", "supported') if spline_order == 0: # return get_warped_label_map(I0,phi,spacing) stn =", "= [1,1]+ list(img_sz) mask = AdaptVal(torch.zeros(*mask_sz)) if dim ==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1", "and 3.') Ab = Ab.view(Ab.shape[0], dim+1, dim).transpose(1,2) Ab_inv = torch.zeros_like(Ab)", "phiR[nrI, ...] = apply_affine_transform_to_map(Ab[nrI, :], phi[nrI, ...]) return phiR def", "dim = 2 elif Ab.shape[1]==12: dim = 3 if dim", "desired size (excluding B and C, i.e, 1 entry for", "(np.ceil((np.array(sz[2:]) * np.array(factor)))).astype('int16') if lowResSize[-1] % 2 != 0: lowResSize[-1]", "sz[1], sz[2]], dtype=dtype) idnp[0, :, :, :] = id[0] idnp[1,", "os.path.isfile(abs_t_with_right_ext): if os.path.samefile(abs_s,abs_t_with_right_ext): # nothing to do here, these are", "return pl def get_parameter_list_and_par_to_name_dict_from_parameter_dict(pd): \"\"\"Same as get_parameter_list_from_parameter_dict; but also returns", "fdt.dYc(I)*lam m[:, 2, :, :, :] = fdt.dZc(I)*lam else: raise", "dim==1: id = np.mgrid[0:sz[0]] elif dim==2: id = np.mgrid[0:sz[0],0:sz[1]] elif", "of size dimxXxYxZ \"\"\" dim = len(sz) csz = np.array(sz)", "supported for the identity map') # now get it into", "-np.power(X[1,:, :, :] - mu[1], 2.) / (2 * np.power(sig[1],", "dim==3: mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range ]=1 sm = get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask = sm.smooth(mask) return", "): \"\"\" Takes a torch array and returns it as", "return g elif dim == 2: g = np.exp(-np.power(X[0,:,:]-mu[0],2.)/(2*np.power(sig[0],2.)) -", "= 0 if Ab.shape[1]==2: dim = 1 elif Ab.shape[1]==6: dim", "+ Ab[10] phiR[2, ...] = Ab[2] * phi[0, ...] +", "[nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched == 'm_f_s_t': if debugging:", "size x param. 
vector); will be overwritten with identity trans.", "= organize_data(m, new_s, sched='depth_concat') input = organize_data(input, self.t, sched='depth_concat') return", "a numpy array csz = np.array([nrOfI,nrOfC]+list(csz)) return Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) def centered_identity_map_multiN(sz,", ":param I: image, batchXxYxZ :param sz: size of image :param", "array \"\"\" dim = len(v.shape)-2 if dim ==2: v =", "np.ndarray) and v.size == 1: return float(v) def checkNan(x): \"\"\"\"", "else: self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride, padding=padding,bias=bias) #y =", "elif nr==12: return 3 else: raise ValueError('Only supports dimensions 1,", "d = d1.copy() d.update(d2) return d def get_parameter_list_from_parameter_dict(pd): \"\"\"Takes a", "1: # id = np.mgrid[0:sz[0]] # elif dim == 2:", "id[1] elif dim==3: idnp = np.zeros([3,sz[0], sz[1], sz[2]], dtype=dtype) idnp[0,:,", "if not reverse: self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding=padding,", "of image :return: returns the vector momentum \"\"\" fdt =", "IS sampler = IS.ResampleImage() low_res_image, _ = sampler.downsample_image_to_size(I, spacing, low_res_size[2::],spline_order)", "= sz[1] desiredSizeNC = np.array([nrOfI, nrOfC] + list(desiredSize)) newspacing =", "identity_map else: idDes = AdaptVal(torch.from_numpy(identity_map_multiN(desiredSizeNC, newspacing))) # now use this", "-1: init.xavier_normal(m.weight.data, gain=1) elif classname.find('Linear') != -1: init.xavier_normal(m.weight.data, gain=1) elif", "view :return: returns view of A of appropriate dimension \"\"\"", "torch.matmul(Cd[n,:,:dim],Ab[n,:,:dim]) updated_param[n,:,:dim] = tm_param updated_param[n,:,dim] = torch.matmul(Cd[n,:,:dim], Ab[n,:,dim]) +Cd[n,:,dim] updated_param", "weight mask for the omt \"\"\" dim = len(img_sz) mask_sz", ":param I0: image to warp, image size BxCxXxYxZ :param phi:", "to a given desired size :param I: Input image 
(expected", "phi[2, ...] + Ab[10] phiR[2, ...] = Ab[2] * phi[0,", "0 to 9 are supported') if spline_order == 0: #", "of the keys based on memory id. :param pd: parameter", "init.uniform(m.weight.data, 0.038, 0.042) elif classname.find('Linear') != -1: init.uniform(m.weight.data, 0.0, 0.02)", "-1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def init_weights(net, init_type='normal'): print('initialization", "A of dimension dim (by adding dummy dimensions if necessary).", "return _compute_warped_image_multiNC_2d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif dim == 3: return", "= m + compute_vector_momentum_from_scalar_momentum_multiN(lam[:, c, ...], I[:, c, ...], nrOfI,", "// 2) if same_padding else 0 if not reverse: self.conv", "a different view (effectively adding dimensions) Iw = compute_warped_image_multiNC(I0.view(torch.Size([1, 1]", "of Variable \"\"\" return [len(np.argwhere(np.isnan(elem.detach().cpu().numpy()))) for elem in x] def", "'float64', ...) 
:return: returns the identity map of dimension dimxXxYxZ", "= np.zeros([2, sz[0], sz[1]], dtype=dtype) # idnp[0, :, :] =", "[a1;a2;a3;b], i.e., all columns stacked on top of each other.", "torch.from_numpy(g) def weights_init_uniform(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv')", "= \\ [ConvBnRel(self.dim + 2, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn),", "3: # id = np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]] # else: #", "mask_sz = [1,1]+ list(img_sz) mask = AdaptVal(torch.ones(*mask_sz))*mask_value if dim ==2:", "- bit floating point: torch.HalfTensor, torch.cuda.HalfTensor # todo: maybe find", "1: stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=True, use_01_input=use_01_input) else: stn =", "include [using_bias, using bn, using elu] # inputs should be", "if identity_map is not None: # todo will remove, currently", "[] for key in pd: pl.append(pd[key]) par_to_name_dict[pd[key]] = key return", "= os.path.splitext(abs_s)[1] abs_t = os.path.abspath(tf) root_t,ext_t = os.path.splitext(abs_t) abs_t_with_right_ext =", "1: return _compute_warped_image_multiNC_1d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif dim == 2:", "/ min_std)) ** omt_power) # omt_const = omt_const*omt_weight_penalty/(EV.reg_factor_in_mermaid*2) # sz", "self).__init__() padding = int((kernel_size - 1) // 2) if same_padding", "nn.Conv2d(self.dim + 2, self.dim, kernel_size, 1, padding=padding_size, bias=False) else: net", "B x pars (batch size x param. 
vector) :return: Updated", "def centered_identity_map(sz, spacing, dtype='float32'): \"\"\" Returns a centered identity map", "a centered identity map (with 0 in the middle) if", "!= -1: init.xavier_normal(m.weight.data, gain=1) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0,", "be a dictionary could contain ['s'],['t'] super(AdpSmoother, self).__init__() self.dim =", "pd: parameter dictionary :return: tuple of (parameter_list, name_dictionary) \"\"\" par_to_name_dict", "np.array(id.astype(dtype)) if dim == 1: id = id.reshape(1, sz[0]) #", "\"\"\" dim = I0.dim()-2 if dim == 1: return _compute_warped_image_multiNC_1d(I0,", "= 1. / (np.array(img_sz) - 1) if identity_map is not", "of channels is the same nrOfI = sz[0] nrOfC =", "= \\frac{x - mean[x]}{ \\sqrt{Var[x] + \\epsilon}} * gamma +", "at coordinates in X Example:: >>> mu, sig = [1,1],", "convert scalar to vector momentum in dimensions 1-3') return m", "same file return else: os.remove(abs_t_with_right_ext) # now we can do", "to dimension) :return: returns transformed map \"\"\" sz = phi.size()", "range(nr_of_mg_weights): weights[:, g, ...] 
= gaussian_std_weights[g] tmp = AdaptVal(weights) if", "list of Variable \"\"\" return [len(np.argwhere(np.isnan(elem.detach().cpu().numpy()))) for elem in x]", "zero_boundary) return ID if not is_numpy else ID.numpy(), newspacing def", "raise ValueError('Only supports dimensions 1, 2, and 3.') def set_affine_transform_to_identity(Ab):", "a numpy array csz = np.array([dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0.,1e-7) def create_vector_parameter(nr_of_elements):", "True if type(v.data) == torch.cuda.FloatTensor or v.data.dtype==torch.float32: return torch.clamp(v, min=(np.asscalar(np.finfo('float32').min))/reduction_factor,", "# Otherwise shifts everything by 0.5*spacing # # :param sz:", "using_sigmoid: net += [nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched ==", "init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_rd_normal(m): classname = m.__class__.__name__", "torch.DoubleTensor or type(v.data) == torch.cuda.DoubleTensor: return torch.clamp(v, min=(np.asscalar(np.finfo('float64').min))/reduction_factor, max=(np.asscalar(np.finfo('float64').max))/reduction_factor) elif", "supported for the centered identity map') # # return idnp", "dim == 1: m[:, 0, :] = fdt.dXc(I)*lam elif dim", "- mu[0], 2.)/(2*np.power(sig[0], 2.))) g = g/g.sum() return g elif", "mean: :param std: :return: \"\"\" if isinstance(tensors, Variable): space_normal(tensors.data, std=std)", "from torch.autograd import Variable from .libraries.modules.stn_nd import STN_ND_BCXYZ from .data_wrapper", "...) 
:return: returns the identity map of dimension dimxXxYxZ \"\"\"", ":, -2] + tensor[:, :, :, -2] - tensor[:, :,", "torch.sum(gi[1].data))) print(\"Grad Output\") print(torch.sum(go[0].data)) return gi[0], gi[1], gi[2] class ConvBnRel(nn.Module):", "sz[d]%2==0: # #even # id[d] -= spacing[d]*(sz[d]//2) # else: #", "returns the identity map of dimension dimxXxYxZ \"\"\" dim =", "and converts it into a list of parameters that can", "2: g = np.exp(-np.power(X[0,:,:]-mu[0],2.)/(2*np.power(sig[0],2.)) - np.power(X[1,:, :] - mu[1], 2.)", "[5,10,10] in 3D) :return: returns vector field of size dimxXxYxZ", "Net ###################################################3 def space_normal(tensors, std=0.1): \"\"\" space normalize for the", "compute_normalized_gaussian(X, mu, sig): \"\"\"Computes a normalized Gaussian. :param X: map", "into range [0,(sz-1)*spacing]^d # id = np.array(id.astype(dtype)) # if dim", "returns transformed maps \"\"\" sz = phi.size() dim = get_dim_of_affine_transform(Ab[0,:])", "[using_bias, using bn, using elu] # inputs should be a", "pd: parameter dictionary :return: list of parameters \"\"\" pl =", "with identity trans. 
:return: \"\"\" sz = Ab.size() nr_of_images =", "_compute_low_res_image(I, spacing, low_res_size, spline_order): import mermaid.image_sampling as IS sampler =", "start.record() output = f(input) end.record() # Waits for everything to", "n_batch identity_map = identity_map[:n_batch] resampled, new_spacing = resample_image(I, spacing, desiredSize,", "net.apply(weights_init_rd_normal) elif init_type == 'normal': net.apply(weights_init_normal) elif init_type == 'uniform':", "bh(m,gi,go): print(\"Grad Input\") print((torch.sum(gi[0].data), torch.sum(gi[1].data))) print(\"Grad Output\") print(torch.sum(go[0].data)) return gi[0],", "= Ab_inv.transpose(1, 2).contiguous().view(Ab.shape[0], -1) return inv_affine_param def update_affine_param(Ab, Cd): \"\"\"Update", "get_resampled_image(I, spacing, desiredSize, spline_order=1, zero_boundary=False, identity_map=None): \"\"\" :param I: B", "20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20,self.dim, 5, active_unit=active_unit, same_padding=True, bn=using_bn)]", "= np.array([nr_of_images, dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0., 1e-7) def create_ND_vector_field_variable(sz): \"\"\"Create vector", "spacing, desiredSize, spline_order=1, zero_boundary=False, identity_map=None): \"\"\" :param I: B C", ":param sz: just the spatial sizes (e.g., [5] in 1D,", "not implemented\") return warped_label_map def t2np(v): \"\"\" Takes a torch", "images :param nrOfC: number of channels :return: returns vector field", "range(Ab.shape[0]): tm_inv = torch.inverse(Ab[n, :, :dim]) Ab_inv[n, :, :dim] =", "is the correct way of doing it return spacing *", "len(sz) csz = np.array(sz) # just to make sure it", "output of BatchNorm is equivalent to considering gamma=1 and beta=0", "x; C = inv(A), d = -Cb :param Ab: B", "raise ValueError('Can only add dimensions, but not remove them') if", "None) or (factor >= 1): print('WARNING: Could not compute low_res_size", "space_normal(tensors.data, std=std) return 
tensors for n in range(tensors.size()[0]): for c", "==2: mask = mask**2 if pow ==3: mask = mask*mask*mask", "updated_param = updated_param.transpose(1,2).contiguous().view(Ab.shape[0],-1) return updated_param def apply_affine_transform_to_map(Ab,phi): \"\"\"Applies an affine", "output = f(input) end.record() # Waits for everything to finish", "-2] - tensor[:, :, :, -3] tensor[:, :,:, :, :,", "x): x = self.conv(x) if self.bn is not None: x", "in dimensions 1 to 3') def _get_low_res_spacing_from_spacing(spacing, sz, lowResSize): \"\"\"Computes", "out_features) if active_unit == 'relu': self.active_unit = nn.ReLU(inplace=True) elif active_unit", "in 2D, [5,10,10] in 3D) :return: returns vector field of", ":param Ab: affine transform parameter column vectors (batch size x", "raise ValueError('Only dimensions 1-3 are currently supported for the centered", "[ConvBnRel(self.dim + 2, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20, self.dim,", "[len(np.argwhere(np.isnan(elem.detach().cpu().numpy()))) for elem in x] def noramlized_spacing_to_smallest(spacing): min_sp = np.min(spacing)", "= (np.ceil((np.array(sz[2:]) * factor))).astype('int16') else: lowResSize[2::] = (np.ceil((np.array(sz[2:]) * np.array(factor)))).astype('int16')", "x): x = self.fc(x) if self.active_unit is not None: x", "the same file return else: os.remove(abs_t_with_right_ext) # now we can", "= var_list # return new_var_list # def identity_map(sz,spacing,dtype='float32'): \"\"\" Returns", "pl = [] for key in pd: pl.append(pd[key]) par_to_name_dict[pd[key]] =", "= stn(I0, phi) return I1_warped def compute_warped_image(I0, phi, spacing, spline_order,", "\"\"\" dim = len(sz)-2 nrOfI = int(sz[0]) if dim ==", "desiredSize, spline_order=1, zero_boundary=False, identity_map=None): \"\"\" Resample an image to a", "class AdpSmoother(nn.Module): \"\"\" a simple conv. 
implementation, generate displacement field", "type(dict()): # should already be in the right format model_pars", "weights = torch.empty(*csz) # set the default if sched =='w_K_w':", "1) if identity_map is not None: # todo will remove,", "\"\"\"Same as get_parameter_list_from_parameter_dict; but also returns a dictionary which keeps", "np.zeros([3,sz[0], sz[1], sz[2]], dtype=dtype) idnp[0,:, :, :] = id[0] idnp[1,:,", "low_res_sz def _compute_low_res_image(I, spacing, low_res_size, spline_order): import mermaid.image_sampling as IS", "size XxYxZ :param phi: map for the warping, size dimxXxYxZ", "if classname.find('Conv') != -1: init.uniform(m.weight.data, 0.038, 0.042) elif classname.find('Linear') !=", "init_type == 'normal': net.apply(weights_init_normal) elif init_type == 'uniform': net.apply(weights_init_uniform) elif", "* spacing_ratio[1] # elif dim == 3: # idnp =", "I0.dim()-2 if dim == 1: return _compute_warped_image_multiNC_1d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input)", "classname.find('Linear') != -1: init.normal(m.weight.data) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0,", "sure it is a numpy array csz = np.array([dim]+list(csz)) return", "bn=using_bn)] if using_sigmoid: net += [nn.Sigmoid()] self.net = nn.Sequential(*net) elif", "new identity map code') raise ValueError('Double check the spacing here", "id[0] idnp[1, :, :, :] = id[1] idnp[2, :, :,", "are supported') if spline_order == 0: # return get_warped_label_map(I0,phi,spacing) stn", "apply_affine_transform_to_map(Ab,phi): \"\"\"Applies an affine transform to a map. :param Ab:", "def _compute_low_res_image(I, spacing, low_res_size, spline_order): import mermaid.image_sampling as IS sampler", ":, :, :] = fdt.dYc(I)*lam m[:, 2, :, :, :]", "Ab[3] * phi[1, ...] + Ab[6] * phi[2, ...] 
+", "def organize_data(moving, target, sched='depth_concat'): if sched == 'depth_concat': input =", ":, :] = id[0] idnp[1,:, :, :] = id[1] idnp[2,:,", "= len(sz) m = create_ND_vector_field_variable_multiN(sz, nrOfI) if dim == 1:", "create_symlink_with_correct_ext(sf, tf): abs_s = os.path.abspath(sf) ext_s = os.path.splitext(abs_s)[1] abs_t =", "size x param. vector) :return: Updated affine parameters \"\"\" dim", "def compute_warped_image(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True): \"\"\"Warps image. :param", "+= [nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched == 'm_d_s': if", "== torch.DoubleTensor or type(v.data) == torch.cuda.DoubleTensor: return torch.clamp(v, min=(np.asscalar(np.finfo('float64').min))/reduction_factor, max=(np.asscalar(np.finfo('float64').max))/reduction_factor)", "std: :return: \"\"\" if isinstance(tensors, Variable): space_normal(tensors.data, std=std) return tensors", "affine transform parameter column vectors (batch size x param. vector)", "= id[1] idnp[2, :, :, :] = id[2] else: raise", "= pars.ParameterDict() s_m_params['smoother']['type'] = 'gaussian' s_m_params['smoother']['gaussian_std'] = gaussian_std s_m =", "x).any() def create_symlink_with_correct_ext(sf, tf): abs_s = os.path.abspath(sf) ext_s = os.path.splitext(abs_s)[1]", "'list_concat': input = torch.cat((moving.unsqueeze(0),target.unsqueeze(0)),dim=0) elif sched == 'difference': input =", "use_01_input=use_01_input) elif spline_order == 1: stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=True,", "equivalent to considering gamma=1 and beta=0 as constants. self.bn =", "dtype='float32'): # \"\"\" # Returns a centered identity map (with", "= spacing/min_spacing # # # # now get it into", "numpy array csz = np.array([nrOfI,nrOfC]+list(csz)) return Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) def centered_identity_map_multiN(sz, spacing,", "constants. 
self.bn = nn.BatchNorm2d(out_channels, eps=0.0001, momentum=0, affine=True) if bn else", "identity_map.shape[0]: n_batch = I.shape[0] desiredSize = desiredSize.copy() desiredSize[0] = n_batch", "spacing, low_res_size, spline_order): import mermaid.image_sampling as IS sampler = IS.ResampleImage()", "ValueError('Incompatible number of affine transforms') if dim != len(sz)-2: raise", "with a specified number of elements. :param nr_of_elements: number of", "dim == 3: # id = np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]] #", "nrOfC] + list(desiredSize)) newspacing = spacing * ((sz[2::].astype('float') - 1.)", "builtins import range import torch from torch.nn.parameter import Parameter from", "of handling this # this is to make sure that", "dimensions 1-3 are currently supported for the identity map') #", "__time_warped_function(input=None): start = torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True) start.record() output =", "vector) :param phi: maps; format batchxnrCxXxYxZ (nrC corresponds to dimension)", "# attention that the second dimension here is image dim,", "def get_parameter_list_from_parameter_dict(pd): \"\"\"Takes a dictionary which contains key value pairs", "new spacing after downsampling \"\"\" desiredSize = desiredSize[2:] is_numpy =", "= spacing.size # spacing_ratio_t = AdaptVal(torch.Tensor(spacing_ratio)) # sp_sz = [1]+[dim]", "\"\"\" par_to_name_dict = dict() pl = [] for key in", "-= spacing[d]*((sz[d]+1)//2) # # # and now store it in", "use_bilinear=False, use_01_input=use_01_input) elif spline_order == 1: stn = STN_ND_BCXYZ(spacing, zero_boundary,", "dim==3: Ab.zero_() Ab[0]=1. Ab[4]=1. Ab[8]=1. 
else: raise ValueError('Only supports dimensions", "to be <1) :return: low res size \"\"\" if (factor", "the batch size and the number of channels is the", "create_ND_vector_field_variable_multiN(sz, nrOfI) if dim == 1: m[:, 0, :] =", "else: raise ValueError('Only supports dimensions 1, 2, and 3.') return", "= tensors[n][c].dim() sz = tensors[n][c].size() mus = np.zeros(dim) stds =", "factor was ' + str(factor)) return sz else: lowResSize =", "XxYxZ \"\"\" # implements this by creating a different view", "Ab[n,:,dim]) +Cd[n,:,dim] updated_param = updated_param.transpose(1,2).contiguous().view(Ab.shape[0],-1) return updated_param def apply_affine_transform_to_map(Ab,phi): \"\"\"Applies", ":param Ab: affine transform parameter column vector :param phi: map;", "= compute_warped_image_multiNC(label_map, phi, spacing,spline_order=0,zero_boundary=True) # check if here should be", "torch.inverse(Ab[n, :, :dim]) Ab_inv[n, :, :dim] = tm_inv Ab_inv[n, :,", "= np.array([dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0.,1e-7) def create_vector_parameter(nr_of_elements): \"\"\"Creates a vector parameters", "input = m elif self.net_sched == 'm_f_s': input = organize_data(m,self.s,sched='depth_concat')", "= True if type(v.data) == torch.cuda.FloatTensor or v.data.dtype==torch.float32: return torch.clamp(v,", "\"\"\"Computes the vector momentum from the scalar momentum: :math:`m=\\\\lambda\\\\nabla I`.", "supported for the centered identity map') return idnp # #", "0:sz[1], 0:sz[2]] else: raise ValueError('Only dimensions 1-3 are currently supported", "of images :return: returns vector field of size nrOfIxdimxXxYxZ \"\"\"", ":, :, -2] - tensor[:, :, :, -3] tensor[:, :,:,", "2: # idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype) # idnp[0,", "return tmp def create_local_filter_weights_parameter_multiN(sz,gaussian_std_weights, nrOfI=1,sched='w_K_w',get_preweight_from_network=False): \"\"\" Create vector field torch", "np.array(sz) low_res_sz[2::] = (np.ceil((np.array(sz[2::]) 
* factor))).astype('int16') return low_res_sz def _compute_low_res_image(I,", "-warped_label_map.data.round()))< 0.1, \"nn interpolation is not precise\" else: raise ValueError(\"", "]=1 sm = get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask = sm.smooth(mask) if pow ==2:", "torch.cuda.FloatTensor # 64 - bit floating point: torch.DoubleTensor, torch.cuda.DoubleTensor #", "def __init__(self, in_features, out_features, active_unit='relu'): super(FcRel, self).__init__() self.fc = nn.Linear(in_features,", "the identity map') # now get it into range [0,(sz-1)*spacing]^d", "return A else: return A.reshape([1]*(dim-current_dim)+list(A.shape)) def get_dim_of_affine_transform(Ab): \"\"\"Returns the number", "contain ['s'],['t'] super(AdpSmoother, self).__init__() self.dim = dim self.net_sched = 'm_only'", "spacing): \"\"\"Computes the vector momentum from the scalar momentum: :math:`m=\\\\lambda\\\\nabla", "mu: array indicating the mean :param sig: array indicating the", "= m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: init.xavier_normal(m.weight.data, gain=1)", "only compute Gaussians in dimensions 1-3') def _compute_warped_image_multiNC_1d(I0, phi, spacing,", "== 1: stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=True, use_01_input=use_01_input) else: stn", "kernel_size, stride, padding=padding,bias=bias) #y = \\frac{x - mean[x]}{ \\sqrt{Var[x] +", "classname.find('Conv') != -1: init.kaiming_normal(m.weight.data, a=0, mode='fan_in') elif classname.find('Linear') != -1:", "# elif dim == 2: # idnp = np.zeros([2, sz[0],", "bias=False) else: net = \\ [ConvBnRel(self.dim + 1, 20, 5,", ":param sz: just the spatial dimensions, i.e., XxYxZ # :param", "list with spacing information [sx,sy,sz] # :param dtype: numpy data-type", "x pars (batch size x param. 
vector) :return: Updated affine", "+ str(factor)) return sz else: lowResSize = np.array(sz) if not", "import range import torch from torch.nn.parameter import Parameter from torch.autograd", "a smooth weight mask for the omt \"\"\" dim =", "identity_map is not None: # todo will remove, currently fix", "print_function from __future__ import absolute_import # from builtins import str", "sz = phi.size() dim = get_dim_of_affine_transform(Ab[0,:]) nr_of_images = Ab.size()[0] if", "is not a dictionary assume that they come from the", "dimensionality of transform (1,2,or 3) \"\"\" nr = len(Ab) if", "momentum, batchxXxYxZ :param I: image, batchXxYxZ :param sz: size of", "input = self.prepare_data(m,new_s) x= input x = self.net(x) return x", "are NaNs. :param x: numpy array :return: True if NaNs", "dim==1: Ab.zero_() Ab[0]=1. elif dim==2: Ab.zero_() Ab[0]=1. Ab[3]=1. elif dim==3:", "return input def forward(self, m,new_s=None): m = m * self.mask", "3]: raise ValueError('Only supports dimensions 1, 2, and 3.') Ab", "vector. For A =[a1,a2,a3], the parameter vector is simply [a1;a2;a3;b],", "sz: just the spatial sizes (e.g., [5] in 1D, [5,10]", "I1_warped = stn(I0, phi) return I1_warped def compute_warped_image(I0, phi, spacing,", "2 nrOfI = sz[0] if dim == 1: id =", "centered identity map') # # return idnp # # def", "of each other. :param Ab: parameter vector :return: dimensionality of", "Ab.zero_() Ab[0]=1. Ab[3]=1. elif dim==3: Ab.zero_() Ab[0]=1. Ab[4]=1. Ab[8]=1. 
else:", "I1_warped = stn(I0, phi) return I1_warped def _compute_warped_image_multiNC_3d(I0, phi, spacing,", "elif dim == 3: idnp = np.zeros([3, sz[0], sz[1], sz[2]],", "m def create_ND_vector_field_variable_multiN(sz, nr_of_images=1): \"\"\" Create vector field torch Variable", "low-res factor (needs to be <1) :return: low res size", "size and the number of channels is the same nrOfI", "resample_image(I, spacing, desiredSize, spline_order=1, zero_boundary=False, identity_map=None): \"\"\" Resample an image", "vector (will be overwritten with the identity transform) :return: \"\"\"", ":, :] = id[2] else: raise ValueError('Only dimensions 1-3 are", "ind_pars: model_pars[par['name']] = par['model_params'] return model_pars def compute_vector_momentum_from_scalar_momentum_multiNC(lam, I, sz,", ":param mean: :param std: :return: \"\"\" if isinstance(tensors, Variable): space_normal(tensors.data,", "elif init_type == 'uniform': net.apply(weights_init_uniform) elif init_type == 'xavier': net.apply(weights_init_xavier)", "vector field of size nrOfIxdimxXxYxZ \"\"\" dim = len(sz) csz", "==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1 elif dim==3: mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range ]=1 sm = get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask", "# if sz[d]%2==0: # #even # id[d] -= spacing[d]*(sz[d]//2) #", "- bit floating point: torch.FloatTensor, torch.cuda.FloatTensor # 64 - bit", "C = inv(A), d = -Cb :param Ab: B x", "at the moment). ' 'Some functionality may not be available.')", "supported in CUDA at the moment). ' 'Some functionality may", "from . import module_parameters as pars from .spline_interpolation import SplineInterpolation_ND_BCXYZ", "dim+1, dim).transpose(1,2) Ab_inv = torch.zeros_like(Ab) for n in range(Ab.shape[0]): tm_inv", "== 1: phiR = phi * Ab[0] + Ab[1] elif", "# a_11x+a_21y+b1 phiR[1, ...] = Ab[1] * phi[0, ...] 
+", "spline_order, zero_boundary, use_01_input) return Iw.view(I0.size()) def compute_warped_image_multiNC(I0, phi, spacing, spline_order,", "= spacing_ratio_t.view(*sp_sz) # new_var_list = [var*spacing_ratio_t if var is not", "transformation. Formally: C(Ax+b)+d = CAx+Cb+d = x; C = inv(A),", "return new_var_list # def recover_var_list_from_min_normalized_space(var_list,spacing,do_transform=True): # if do_transform: # min_spacing", "= True sz = np.array(list(I.size())) # check that the batch", ":return: returns spacing of low res parameterization \"\"\" # todo:", "(kernel_size-1)//2 if self.net_sched == 'm_only': if debugging: self.net = nn.Conv2d(2,", "csz = np.array([nrOfI,nrOfC]+list(csz)) return Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) def centered_identity_map_multiN(sz, spacing, dtype='float32'): \"\"\"", "stn(I0, phi) return I1_warped def _compute_warped_image_multiNC_2d(I0, phi, spacing, spline_order,zero_boundary=False,use_01_input=True): if", "0, :, :, :] = fdt.dXc(I)*lam m[:, 1, :, :,", "it is a numpy array csz = np.array([nrOfI,nr_of_mg_weights]+list(csz)) weights =", "stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=False, use_01_input=use_01_input) elif spline_order == 1:", "Formally: C(Ax+b)+d = CAx+Cb+d :param Ab: B x pars (batch", "size \"\"\" if (factor is None): print('WARNING: Could not compute", "id = np.mgrid[0:sz[0]] # elif dim == 2: # id", "is a numpy array csz = np.array([nrOfI, dim]+list(csz)) if get_field_from_external_network:", "return warped_label_map def t2np(v): \"\"\" Takes a torch array and", "init.constant(m.bias.data, 0.0) def weights_init_normal(m): classname = m.__class__.__name__ # print(classname) if", "map') # # return idnp # # def tranfrom_var_list_into_min_normalized_space(var_list,spacing,do_transform=True): #", "== torch.cuda.DoubleTensor: return torch.clamp(v, min=(np.asscalar(np.finfo('float64').min))/reduction_factor, max=(np.asscalar(np.finfo('float64').max))/reduction_factor) elif v.data.dtype == 
torch.HalfTensor", ":param sz: just the spatial dimensions, i.e., XxYxZ :param spacing:", "maybe find a cleaner way of handling this # this", "== 1: g = np.exp(-np.power(X[0, :] - mu[0], 2.)/(2*np.power(sig[0], 2.)))", "phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) else: raise ValueError('Images can only be warped", "sz, lowResSize): \"\"\"Computes spacing for the low-res parametrization from image", "= False if not isinstance(I, torch.Tensor): I = torch.Tensor(I) is_numpy", "as sf from .data_wrapper import USE_CUDA import numpy as np", ".. todo:: Reorganize this package in a more meaningful way.", "2 :return: resulting dictionary \"\"\" d = d1.copy() d.update(d2) return", "for n in range(Ab.shape[0]): tm_param = torch.matmul(Cd[n,:,:dim],Ab[n,:,:dim]) updated_param[n,:,:dim] = tm_param", "if self.active_unit is not None: x = self.active_unit(x) return x", "of elements. :param nr_of_elements: number of vector elements :return: returns", "True) self.get_net_sched() #self.net.register_backward_hook(bh) def get_net_sched(self, debugging=True, using_bn=True, active_unit='relu', using_sigmoid=False ,", "raise ValueError(\" the label warping method is not implemented\") return", "for g in range(nr_of_mg_weights): weights[:, g, ...] = gaussian_std_weights[g] tmp", "# #odd # id[d] -= spacing[d]*((sz[d]+1)//2) # # # and", "= fdt.dYc(I)*lam m[:, 2, :, :, :] = fdt.dZc(I)*lam else:", "id[0] * spacing_ratio[0] # elif dim == 2: # idnp", "elements :return: returns the parameter vector \"\"\" return Parameter(MyTensor(nr_of_elements).normal_(0., 1e-7))", "sz[0] if dim == 1: id = np.zeros([nrOfI, 1, sz[2]],", "tensor[:, :, :, -2] + tensor[:, :, :, -2] -", "update_affine_param(Ab, Cd): \"\"\"Update affine parameters. 
Formally: C(Ax+b)+d = CAx+Cb+d :param", "elif sched == 'width_concat': input = torch.cat((moving, target), dim=3) elif", "for n in range(Ab.shape[0]): tm_inv = torch.inverse(Ab[n, :, :dim]) Ab_inv[n,", "= root_t + ext_s if os.path.isfile(abs_t_with_right_ext): if os.path.samefile(abs_s,abs_t_with_right_ext): # nothing", "= nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride, padding=padding,bias=bias) #y = \\frac{x -", "def compute_vector_momentum_from_scalar_momentum_multiNC(lam, I, sz, spacing): \"\"\"Computes the vector momentum from", "sure it is a numpy array csz = np.array([nr_of_images, dim]+list(csz))", "torch.cuda.Event(enable_timing=True) start.record() output = f(input) end.record() # Waits for everything", "torch.cat((moving.unsqueeze(0),target.unsqueeze(0)),dim=0) elif sched == 'difference': input = moving-target return input", "...] = apply_affine_transform_to_map(Ab[nrI, :], phi[nrI, ...]) return phiR def compute_normalized_gaussian(X,", "NaNs are present, False else \"\"\" return (x != x).any()", "padding=padding_size, bias=False,groups=2) else: net = \\ [ConvBnRel(self.dim, 20, 5, active_unit=active_unit,", "sums work (hence will be smaller than it could be,", "res size \"\"\" if (factor is None) or (factor >=", "returns the warped image of size BxCxXxYxZ \"\"\" dim =", "max_std = torch.max(stds) # omt_const = torch.abs(torch.log(max_std/stds))**omt_power # omt_const =", "...) 
:return: returns the identity map \"\"\" dim = len(sz)", "BxCxXxYxZ format :param spacing: list with spacing information [sx,sy,sz] :param", "tm_inv = torch.inverse(Ab[n, :, :dim]) Ab_inv[n, :, :dim] = tm_inv", "root_t + ext_s if os.path.isfile(abs_t_with_right_ext): if os.path.samefile(abs_s,abs_t_with_right_ext): # nothing to", "sz[2], sz[3]], dtype=dtype) elif dim == 3: id = np.zeros([nrOfI,", "get_scalar(v): if isinstance(v, float): return v elif isinstance(v, np.ndarray) and", "dictionary 1 :param d2: dictionary 2 :return: resulting dictionary \"\"\"", "keys 'name' and 'model_params' for par in ind_pars: model_pars[par['name']] =", "in range(dim): id[d]*=spacing[d] #id[d]*=2./(sz[d]-1) #id[d]-=1. # and now store it", ":, -1] = tensor[:, :, :, -2] + tensor[:, :,", "- 1) / (np.array(lowResSize[2::]) - 1) ########################################## Adaptive Net ###################################################3", "if necessary). :param A: numpy array :param dim: desired dimension", "low res parameterization \"\"\" # todo: check that this is", "space_normal(m.weight.data) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0)", "store it in a dim+1 array and rescale by the", "the parameter vector \"\"\" return Parameter(MyTensor(nr_of_elements).normal_(0., 1e-7)) def create_ND_vector_field_parameter_multiN(sz, nrOfI=1,get_field_from_external_network=False):", "range(dim): # id[d] *= spacing[d] # if sz[d]%2==0: # #even", "g / g.sum() return g else: raise ValueError('Can only compute", "SplineInterpolation_ND_BCXYZ(spacing, spline_order) I1_warped = stn(I0, phi) return I1_warped def compute_warped_image(I0,", "g, ...] 
= gaussian_std_weights[g] tmp = AdaptVal(weights) if get_preweight_from_network: tmp.requires_grad", "given desired size :param I: Input image (expected to be", "== 'm_f_s_t': if debugging: self.net = nn.Conv2d(self.dim+2, self.dim, kernel_size, 1,", "init.constant(m.bias.data, 0.0) def init_weights(net, init_type='normal'): print('initialization method [%s]' % init_type)", "elif init_type == 'normal': net.apply(weights_init_normal) elif init_type == 'uniform': net.apply(weights_init_uniform)", "= True) self.get_net_sched() #self.net.register_backward_hook(bh) def get_net_sched(self, debugging=True, using_bn=True, active_unit='relu', using_sigmoid=False", "list with spacing information [sx,sy,sz] :param dtype: numpy data-type ('float32',", "list(img_sz) mask = AdaptVal(torch.zeros(*mask_sz)) if dim ==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1 elif dim==3:", "class FcRel(nn.Module): # fc+ relu(option) def __init__(self, in_features, out_features, active_unit='relu'):", "# :param spacing: list with spacing information [sx,sy,sz] # :param", "def weights_init_uniform(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv') !=", "format nrCxXxYxZ (nrC corresponds to dimension) :return: returns transformed map", "of given size. 
:param sz: just the spatial sizes (e.g.,", "if dim==2: tensor[:, :, -1,:] = tensor[:, :,-2,:] + tensor[:,", "array \"\"\" return (v.detach()).cpu().numpy() def cxyz_to_xyzc( v ): \"\"\" Takes", "input = moving-target return input def bh(m,gi,go): print(\"Grad Input\") print((torch.sum(gi[0].data),", "= nn.ReLU(inplace=True) elif active_unit == 'elu': self.active_unit = nn.ELU(inplace=True) else:", "[nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched == 'm_d_s_f_t': if debugging:", ":return: returns the vector momentum \"\"\" fdt = fd.FD_torch(spacing) dim", "True else: tmp = Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) return tmp def create_local_filter_weights_parameter_multiN(sz,gaussian_std_weights, nrOfI=1,sched='w_K_w',get_preweight_from_network=False):", "(excluding B and C, i.e, 1 entry for 1D, 2", "elif dim == 2: g = np.exp(-np.power(X[0,:,:]-mu[0],2.)/(2*np.power(sig[0],2.)) - np.power(X[1,:, :]", "- torch.matmul(tm_inv, Ab[n,:,dim]) inv_affine_param = Ab_inv.transpose(1, 2).contiguous().view(Ab.shape[0], -1) return inv_affine_param", "mus, stds) tensors[n,c] = torch.from_numpy(g) def weights_init_uniform(m): classname = m.__class__.__name__", "spline_order): import mermaid.image_sampling as IS sampler = IS.ResampleImage() low_res_image, _", "'uniform': net.apply(weights_init_uniform) elif init_type == 'xavier': net.apply(weights_init_xavier) elif init_type ==", "dimension here is image dim, not nrOfC nrOfC = sz[1]", "# def identity_map(sz,spacing,dtype='float32'): \"\"\" Returns an identity map. 
:param sz:", "== 1: id = id.reshape(1, sz[0]) # add a dummy", "np.min(spacing) spacing[spacing>min_sp]=min_sp return spacing def time_warped_function(f): def __time_warped_function(input=None): start =", "+ tensor[:, :, :,-2] - tensor[:, :, :,-3] if dim==3:", "individual_parameters_to_model_parameters(ind_pars): model_pars = dict() if type(ind_pars) == type(dict()): # should", "return phiR def apply_affine_transform_to_map_multiNC(Ab,phi): \"\"\"Applies an affine transform to maps", "[sx,sy,sz] # :param dtype: numpy data-type ('float32', 'float64', ...) #", "sz[0]: raise ValueError('Incompatible number of affine transforms') if dim !=", "view of A of dimension dim (by adding dummy dimensions", "self.dim, kernel_size, 1, padding=padding_size, bias=False) else: net = \\ [ConvBnRel(self.dim", ".libraries.modules.stn_nd import STN_ND_BCXYZ from .data_wrapper import AdaptVal from .data_wrapper import", "= id[2] * spacing_ratio[2] # else: # raise ValueError('Only dimensions", "dimensions 1-3 are currently supported for the identity map') for", "same_padding=True, bn=using_bn), ConvBnRel(20, self.dim, 5, active_unit=active_unit, same_padding=True, bn=using_bn)] if using_sigmoid:", "mask = AdaptVal(torch.ones(*mask_sz))*mask_value if dim ==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1 elif dim==3: mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range", "dictionary \"\"\" d = d1.copy() d.update(d2) return d def get_parameter_list_from_parameter_dict(pd):", "self.mask = Parameter(torch.cat([torch.ones(inputs['s'].size())]*dim, 1), requires_grad = True) self.get_net_sched() #self.net.register_backward_hook(bh) def", "None: x = self.active_unit(x) return x class AdpSmoother(nn.Module): \"\"\" a", "be overwritten with identity trans. 
:return: \"\"\" sz = Ab.size()", "spacing[d]*(sz[d]//2) # else: # #odd # id[d] -= spacing[d]*((sz[d]+1)//2) #", "!= -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_orthogonal(m): classname", "# else: # #odd # id[d] -= spacing[d]*((sz[d]+1)//2) # #", "dim==3: mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range ]=1 sm = get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask = sm.smooth(mask) if", "'m_only' self.s = inputs['s'].detach() self.t = inputs['t'].detach() self.mask = Parameter(torch.cat([torch.ones(inputs['s'].size())]*dim,", "(factor is None) or (factor >= 1): print('WARNING: Could not", "sz[2]], dtype=dtype) idnp[0, :, :, :] = id[0] idnp[1, :,", "for key in pd: pl.append(pd[key]) par_to_name_dict[pd[key]] = key return pl,", ":, :, :] = id[0] idnp[1, :, :, :] =", "get it into range [0,(sz-1)*spacing]^d id = np.array( id.astype(dtype) )", "0, :, :] = fdt.dXc(I)*lam m[:, 1, :, :] =", "in range(tensors.size()[0]): for c in range(tensors.size()[1]): dim = tensors[n][c].dim() sz", "C(Ax+b)+d = CAx+Cb+d = x; C = inv(A), d =", "spacing.size # spacing_ratio_t = AdaptVal(torch.Tensor(spacing_ratio)) # sp_sz = [1]+[dim] +[1]*dim", "id[d] -= spacing[d]*(sz[d]//2) else: #odd id[d] -= spacing[d]*((sz[d]+1)//2) # and", "self.net = nn.Conv2d(self.dim + 2, self.dim, kernel_size, 1, padding=padding_size, bias=False)", "#todo: check that this is the correct way of doing", "active_unit=active_unit, same_padding=True, bn=using_bn)] if using_sigmoid: net += [nn.Sigmoid()] self.net =", "dim == 1: id = id.reshape(1, sz[0]) # add a", "init_type='normal'): print('initialization method [%s]' % init_type) if init_type == 'rd_normal':", "= np.zeros([nrOfI,2,sz[2],sz[3]],dtype=dtype) elif dim == 3: id = np.zeros([nrOfI,3,sz[2],sz[3],sz[4]],dtype=dtype) else:", "this by creating a different view (effectively adding dimensions) Iw", "self.net = nn.Sequential(*net) elif self.net_sched =='m_f_s': if debugging: 
self.net =", "smaller than it could be, # but values of this", "raise ValueError('Only supports dimensions 1, 2, and 3.') phiR =", "centered_identity_map(sz[2::], spacing,dtype=dtype) return id def identity_map_multiN(sz,spacing,dtype='float32'): \"\"\" Create an identity", "\"\"\" Create a centered identity map (shifted so it is", "key in pd: pl.append(pd[key]) return pl def get_parameter_list_and_par_to_name_dict_from_parameter_dict(pd): \"\"\"Same as", "use this map for resampling ID = compute_warped_image_multiNC(I, idDes, newspacing,", "phi) return I1_warped def _compute_warped_image_multiNC_3d(I0, phi, spacing, spline_order,zero_boundary=False,use_01_input=True): if spline_order", ":param spacing: list with spacing information [sx,sy,sz] :param dtype: numpy", "tranfrom_var_list_into_min_normalized_space(var_list,spacing,do_transform=True): # if do_transform: # min_spacing = np.min(spacing) # spacing_ratio", "compute low_res_size as factor was ' + str(factor)) return sz", "Ab: B x pars (batch size x param. vector) :return:", "x param. vector); will be overwritten with identity trans. :return:", "bn, using elu] # inputs should be a dictionary could", "Formally: C(Ax+b)+d = CAx+Cb+d = x; C = inv(A), d", "currently supported for the identity map') for n in range(nrOfI):", "if dim == 1: # idnp = np.zeros([1, sz[0]], dtype=dtype)", "low-res parametrization from image spacing. :param spacing: image spacing :param", "# idnp = np.zeros([3, sz[0], sz[1], sz[2]], dtype=dtype) # idnp[0,", "f(input) end.record() # Waits for everything to finish running torch.cuda.synchronize()", "+ Ab[3] * phi[1, ...] + Ab[5] # a_12x+a_22y+b2 elif", "nr==2: return 1 elif nr==6: return 2 elif nr==12: return", "def centered_min_normalized_identity_map(sz, spacing, dtype='float32'): # \"\"\" # Returns a centered", "data type: ' + str( type(v.data))) def lift_to_dimension(A, dim): \"\"\"Creates", "phiR[1, ...] = Ab[1] * phi[0, ...] 
+ Ab[4] *", "a view of A of dimension dim (by adding dummy", "x = self.active_unit(x) return x class AdpSmoother(nn.Module): \"\"\" a simple", "in a more meaningful way. \"\"\" from __future__ import print_function", "spline_order=1, zero_boundary=False, identity_map=None): \"\"\" Resample an image to a given", "spacing[d]*((sz[d]+1)//2) # and now store it in a dim+1 array", "1, 2, and 3.') def set_affine_transform_to_identity_multiN(Ab): \"\"\"Set the affine transforms", "not nrOfC nrOfC = sz[1] for c in range(nrOfC): #", "low_res_image, _ = sampler.downsample_image_to_size(I, spacing, low_res_size[2::],spline_order) return low_res_image def individual_parameters_to_model_parameters(ind_pars):", "sz[3], sz[4]], dtype=dtype) else: raise ValueError('Only dimensions 1-3 are currently", "if debugging: self.net = nn.Conv2d(self.dim+1, self.dim, kernel_size, 1, padding=padding_size, bias=False)", "USE_CUDA import numpy as np from . import finite_differences as", "floating point: torch.FloatTensor, torch.cuda.FloatTensor # 64 - bit floating point:", "scalar to vector momentum in dimensions 1-3') return m def", "= id[1] idnp[2,:, :, :] = id[2] else: raise ValueError('Only", "warped in dimensions 1 to 3') def _get_low_res_spacing_from_spacing(spacing, sz, lowResSize):", "gi[2] class ConvBnRel(nn.Module): # conv + bn (optional) + relu", "spacing [dx,dy,dz] :return: returns the warped image of size BxCxXxYxZ", "\"\"\" Create vector field torch Parameter of given size :param", "get_warped_label_map(I0,phi,spacing) stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=False, use_01_input=use_01_input) elif spline_order ==", "index # # for d in range(dim): # id[d] *=", "dimensions if necessary). 
:param A: numpy array :param dim: desired", "= torch.zeros_like(Ab) for n in range(Ab.shape[0]): tm_param = torch.matmul(Cd[n,:,:dim],Ab[n,:,:dim]) updated_param[n,:,:dim]", "if dim == 1: id = np.zeros([nrOfI, 1, sz[2]], dtype=dtype)", "Ab.view(Ab.shape[0], dim+1, dim).transpose(1, 2) Cd = Cd.view(Cd.shape[0], dim+1, dim).transpose(1, 2)", "if dim==1: id = np.mgrid[0:sz[0]] elif dim==2: id = np.mgrid[0:sz[0],0:sz[1]]", ":, :,-1] = tensor[:, :, :,-2] + tensor[:, :, :,-2]", "returns the parameter vector \"\"\" return Parameter(MyTensor(nr_of_elements).normal_(0., 1e-7)) def create_ND_vector_field_parameter_multiN(sz,", "mu[2], 2.) / (2 * np.power(sig[2], 2.))) g = g", "\"\"\" Computes spacing for the low-res parameterization from image spacing", "number of images :param nrOfC: number of channels :return: returns", ":param sz: size of an image in BxCxXxYxZ format :param", "id = id.reshape(1,sz[0]) # add a dummy first index for", "a dim+1 array if dim==1: idnp = np.zeros([1, sz[0]], dtype=dtype)", "desiredSize.copy() desiredSize[0] = n_batch identity_map = identity_map[:n_batch] resampled, new_spacing =", "elif self.net_sched == 'm_d_s_f_t': if debugging: self.net = nn.Conv2d(self.dim +", "tmp = Parameter(tmp) return tmp def create_ND_scalar_field_parameter_multiNC(sz, nrOfI=1, nrOfC=1): \"\"\"", "returns the identity map of dimension dimxXxYxZ # \"\"\" #", "it is centered around 0) :param sz: size of an", "init_type == 'uniform': net.apply(weights_init_uniform) elif init_type == 'xavier': net.apply(weights_init_xavier) elif", "supports dimensions 1, 2, and 3.') def set_affine_transform_to_identity_multiN(Ab): \"\"\"Set the", "pow ==2: mask = mask**2 if pow ==3: mask =", "== 'difference': input = moving-target return input def bh(m,gi,go): print(\"Grad", "tensor[:, :, :,-2] - tensor[:, :, :,-3] if dim==3: tensor[:,", "ValueError('Incompatible number of affine transforms') phiR = MyTensor(sz).zero_().type_as(phi) for nrI", "spacing, spline_order, zero_boundary, 
use_01_input) return Iw.view(I0.size()) def compute_warped_image_multiNC(I0, phi, spacing,", ":param spacing: image spacing :param sz: size of image :param", "0.1, \"nn interpolation is not precise\" else: raise ValueError(\" the", "# return new_var_list # def recover_var_list_from_min_normalized_space(var_list,spacing,do_transform=True): # if do_transform: #", "=min_spacing/spacing # dim = spacing.size # spacing_ratio_t = AdaptVal(torch.Tensor(spacing_ratio)) #", "3 for 3D) :return: returns a tuple: the downsampled image,", "spacing * ((sz[2::].astype('float') - 1.) / ( desiredSizeNC[2::].astype('float') - 1.))", "#id[d]-=1. # and now store it in a dim+1 array", "self.net = nn.Sequential(*net) def prepare_data(self, m, new_s): input=None if self.net_sched", "None: idDes = identity_map else: idDes = AdaptVal(torch.from_numpy(identity_map_multiN(desiredSizeNC, newspacing))) #", "size of low re parameterization :return: returns spacing of low", "type(v.data) == torch.cuda.HalfTensor: return torch.clamp(v, min=(np.asscalar(np.finfo('float16').min))/reduction_factor, max=(np.asscalar(np.finfo('float16').max))/reduction_factor) else: raise ValueError('Unknown", "1 elif Ab.shape[1] == 6: dim = 2 elif Ab.shape[1]", "parameters that can be used as an input to an", "'m_only': if debugging: self.net = nn.Conv2d(2, 2, kernel_size, 1, padding=padding_size,", "= Ab.size() nr_of_images = sz[0] for nrI in range(nr_of_images): set_affine_transform_to_identity(Ab[nrI,", "which contains key value pairs for model parameters and converts", "== 3: id = np.zeros([nrOfI,3,sz[2],sz[3],sz[4]],dtype=dtype) else: raise ValueError('Only dimensions 1-3", "B and C, i.e, 1 entry for 1D, 2 for", "-1: init.xavier_normal(m.weight.data, gain=1) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02)", "for the identity map') # # min_spacing = np.min(spacing) #", "self.net_sched = 'm_only' self.s = inputs['s'].detach() self.t = inputs['t'].detach() self.mask", 
"init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_orthogonal(m): classname = m.__class__.__name__", "= resample_image(I, spacing, desiredSize, spline_order=spline_order, zero_boundary=zero_boundary, identity_map=identity_map) return resampled def", "min_sp = np.min(spacing) spacing[spacing>min_sp]=min_sp return spacing def time_warped_function(f): def __time_warped_function(input=None):", "dimxXxYxZ \"\"\" dim = len(sz) csz = np.array(sz) # just", "__init__(self, in_features, out_features, active_unit='relu'): super(FcRel, self).__init__() self.fc = nn.Linear(in_features, out_features)", "torch.cuda.DoubleTensor # 16 - bit floating point: torch.HalfTensor, torch.cuda.HalfTensor #", "be, # but values of this size should not occur", "create_ND_vector_field_parameter_multiN(sz, nrOfI=1,get_field_from_external_network=False): \"\"\"Create vector field torch Parameter of given size.", "Parameter vectors B x pars (batch size x param. vector);", "= tensor[:, :, :, -2] + tensor[:, :, :, -2]", "number of images m = create_ND_vector_field_variable_multiN(sz[2::], nrOfI) # attention that", "sm = get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask = sm.smooth(mask) return mask.detach() def momentum_boundary_weight_mask(img_sz,spacing,mask_range=5,smoother_std", "elif spline_order == 1: stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=True, use_01_input=use_01_input)", "[%s] is not implemented' % init_type) def organize_data(moving, target, sched='depth_concat'):", "if dim not in [1, 2, 3]: raise ValueError('Only supports", "spacing, spline_order, zero_boundary=False, use_01_input=True): \"\"\"Warps image. 
:param I0: image to", "space_normal(m.weight.data) elif classname.find('Linear') != -1: space_normal(m.weight.data) elif classname.find('BatchNorm2d') != -1:", "of vector elements :return: returns the parameter vector \"\"\" return", "Cd.view(Cd.shape[0], dim+1, dim).transpose(1, 2) updated_param = torch.zeros_like(Ab) for n in", "!= -1: init.orthogonal(m.weight.data, gain=1) elif classname.find('Linear') != -1: init.orthogonal(m.weight.data, gain=1)", "do here, these are already the same file return else:", "def resample_image(I, spacing, desiredSize, spline_order=1, zero_boundary=False, identity_map=None): \"\"\" Resample an", ":, -3] def get_resampled_image(I, spacing, desiredSize, spline_order=1, zero_boundary=False, identity_map=None): \"\"\"", "= os.path.splitext(abs_t) abs_t_with_right_ext = root_t + ext_s if os.path.isfile(abs_t_with_right_ext): if", "transformation of the form y=Ax+b stored in a column vector.", "the warped image of size BxCxXxYxZ \"\"\" dim = I0.dim()-2", "+ list(desiredSize)) newspacing = spacing * ((sz[2::].astype('float') - 1.) /", "idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype) idnp[0, :, :] =", "bias=False) else: net = \\ [ConvBnRel(self.dim + 2, 20, 5,", "an optimizer. :param pd: parameter dictionary :return: list of parameters", "[ConvBnRel(self.dim + 1, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20, self.dim,", "map \"\"\" dim = len(sz)-2 nrOfI = int(sz[0]) if dim", "def create_local_filter_weights_parameter_multiN(sz,gaussian_std_weights, nrOfI=1,sched='w_K_w',get_preweight_from_network=False): \"\"\" Create vector field torch Parameter of", "identity map') for n in range(nrOfI): id[n, ...] = centered_identity_map(sz[2::],", "spacing) return m def compute_vector_momentum_from_scalar_momentum_multiN(lam, I, nrOfI, sz, spacing): \"\"\"Computes", "v def get_scalar(v): if isinstance(v, float): return v elif isinstance(v,", "\"\"\" a simple conv. 
implementation, generate displacement field \"\"\" def", "# now get it into range [0,(sz-1)*spacing]^d # id =", "vector :param phi: map; format nrCxXxYxZ (nrC corresponds to dimension)", "dim == 1: # id = np.mgrid[0:sz[0]] # elif dim", ":param I0: image to warp, image size XxYxZ :param phi:", "parametrization from image spacing. :param spacing: image spacing :param sz:", "1e-7) def create_ND_vector_field_variable(sz): \"\"\"Create vector field torch Variable of given", "updated_param.transpose(1,2).contiguous().view(Ab.shape[0],-1) return updated_param def apply_affine_transform_to_map(Ab,phi): \"\"\"Applies an affine transform to", "+ Ab[9] phiR[1, ...] = Ab[1] * phi[0, ...] +", "id[1] idnp[2, :, :, :] = id[2] else: raise ValueError('Only", "a list of parameters that can be used as an", "supported') if spline_order == 0: stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=False,", "def omt_boundary_weight_mask(img_sz,spacing,mask_range=5,mask_value=5,smoother_std =0.05): \"\"\"generate a smooth weight mask for the", "if (factor is None): print('WARNING: Could not compute low_res_size as", "bias=bias) else: self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride, padding=padding,bias=bias) #y", "= Ab.view(Ab.shape[0], dim+1, dim).transpose(1,2) Ab_inv = torch.zeros_like(Ab) for n in", "new_var_list = [var*spacing_ratio_t if var is not None else None", "organize_data(moving, target, sched='depth_concat'): if sched == 'depth_concat': input = torch.cat([moving,", "moving-target return input def bh(m,gi,go): print(\"Grad Input\") print((torch.sum(gi[0].data), torch.sum(gi[1].data))) print(\"Grad", "- mu[1], 2.) 
/ (2 * np.power(sig[1], 2.)) -np.power(X[2,:, :,", "dimensions 1, 2, and 3.') def set_affine_transform_to_identity(Ab): \"\"\"Sets the affine", "#even id[d] -= spacing[d]*(sz[d]//2) else: #odd id[d] -= spacing[d]*((sz[d]+1)//2) #", "get it into range [0,(sz-1)*spacing]^d # id = np.array(id.astype(dtype)) #", "= np.min(spacing) # spacing_ratio =min_spacing/spacing # dim = spacing.size #", "sz[1], sz[2]], dtype=dtype) idnp[0,:, :, :] = id[0] idnp[1,:, :,", ":] = fdt.dYc(I)*lam m[:, 2, :, :, :] = fdt.dZc(I)*lam", "size nrOfIxdimxXxYxZ \"\"\" dim = len(sz) csz = np.array(sz) #", "information [sx,sy,sz] # :param dtype: numpy data-type ('float32', 'float64', ...)", "from .data_wrapper import AdaptVal from .data_wrapper import MyTensor from .", "(by adding dummy dimensions if necessary). :param A: numpy array", "3 if dim not in [1, 2, 3]: raise ValueError('Only", "dim = len(sz) - 2 nrOfI = sz[0] if dim", "of BxCxXxYxZ format) :param spacing: array describing the spatial spacing", "self.active_unit = nn.ReLU(inplace=True) elif active_unit == 'elu': self.active_unit = nn.ELU(inplace=True)", "if nr_of_images != sz[0]: raise ValueError('Incompatible number of affine transforms')", "== dim: return A else: return A.reshape([1]*(dim-current_dim)+list(A.shape)) def get_dim_of_affine_transform(Ab): \"\"\"Returns", "if dim == 1: id = np.zeros([nrOfI,1,sz[2]],dtype=dtype) elif dim ==", "mask = AdaptVal(torch.zeros(*mask_sz)) if dim ==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1 elif dim==3: mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range", "updated_param = torch.zeros_like(Ab) for n in range(Ab.shape[0]): tm_param = torch.matmul(Cd[n,:,:dim],Ab[n,:,:dim])", "I1_warped def _compute_warped_image_multiNC_3d(I0, phi, spacing, spline_order,zero_boundary=False,use_01_input=True): if spline_order not in", "= phi.size() dim = get_dim_of_affine_transform(Ab[0,:]) nr_of_images = Ab.size()[0] if nr_of_images", "not be imported (only supported in 
CUDA at the moment).", "# idnp[1, :, :, :] = id[1] * spacing_ratio[1] #", "np.array( id.astype(dtype) ) if dim==1: id = id.reshape(1,sz[0]) # add", "the label warping method is not implemented\") return warped_label_map def", "sz[0]], dtype=dtype) idnp[0,:] = id[0] elif dim==2: idnp = np.zeros([2,", "it is a numpy array csz = np.array([nrOfI, dim]+list(csz)) if", "phi[0, ...] + Ab[5] * phi[1, ...] + Ab[8] *", "is not None: # todo will remove, currently fix for", "init.constant(m.bias.data, 0.0) def weights_init_orthogonal(m): classname = m.__class__.__name__ print(classname) if classname.find('Conv')", "print('initialization method [%s]' % init_type) if init_type == 'rd_normal': net.apply(weights_init_rd_normal)", "len(sz)-2: raise ValueError('Incompatible number of affine transforms') phiR = MyTensor(sz).zero_().type_as(phi)", "in range(nrOfI): id[n,...] = identity_map(sz[2::],spacing,dtype=dtype) return id def centered_identity_map(sz, spacing,", "torch.clamp(v, min=(np.asscalar(np.finfo('float64').min))/reduction_factor, max=(np.asscalar(np.finfo('float64').max))/reduction_factor) elif v.data.dtype == torch.HalfTensor or type(v.data) ==", "<gh_stars>100-1000 \"\"\"Various utility functions. .. 
todo:: Reorganize this package in", "# \"\"\" # dim = len(sz) # if dim ==", "updated_param[n,:,:dim] = tm_param updated_param[n,:,dim] = torch.matmul(Cd[n,:,:dim], Ab[n,:,dim]) +Cd[n,:,dim] updated_param =", "re parameterization :return: returns spacing of low res parameterization \"\"\"", "# def recover_var_list_from_min_normalized_space(var_list,spacing,do_transform=True): # if do_transform: # min_spacing = np.min(spacing)", "image spacing :param spacing: image spacing :param sz: size of", "vector field of size dimxXxYxZ \"\"\" dim = len(sz) csz", "np.ones(dim) centered_id = centered_identity_map(sz,spacing) g = compute_normalized_gaussian(centered_id, mus, stds) tensors[n,c]", "isinstance(tensors, Variable): space_normal(tensors.data, std=std) return tensors for n in range(tensors.size()[0]):", "...] + Ab[3] * phi[1, ...] + Ab[5] # a_12x+a_22y+b2", "pd: pl.append(pd[key]) par_to_name_dict[pd[key]] = key return pl, par_to_name_dict def remove_infs_from_variable(v):", "* Ab[0] + Ab[1] elif dim == 2: phiR[0, ...]", "spacing[d] # if sz[d]%2==0: # #even # id[d] -= spacing[d]*(sz[d]//2)", "array and rescale by the ratio # if dim ==", "1) / (np.array(lowResSize[2::]) - 1) ########################################## Adaptive Net ###################################################3 def", "not reverse: self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding=padding, bias=bias)", "+ Ab[5] # a_12x+a_22y+b2 elif dim == 3: phiR[0, ...]", "[5,10] in 2D, [5,10,10] in 3D) :param nrOfI: number of", "id[1] idnp[2,:, :, :] = id[2] else: raise ValueError('Only dimensions", "= \\ [ConvBnRel(self.dim, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20,self.dim, 5,", "2: id = np.mgrid[0:sz[0], 0:sz[1]] elif dim == 3: id", "== 'list_concat': input = torch.cat((moving.unsqueeze(0),target.unsqueeze(0)),dim=0) elif sched == 'difference': input", "CUDA at the moment). 
' 'Some functionality may not be", "fdt.dXc(I)*lam m[:, 1, :, :] = fdt.dYc(I)*lam elif dim ==", "column vectors (batch size x param. vector) :param phi: maps;", "def create_ND_vector_field_parameter_multiN(sz, nrOfI=1,get_field_from_external_network=False): \"\"\"Create vector field torch Parameter of given", "spacing_ratio_t.view(*sp_sz) # new_var_list = [var*spacing_ratio_t if var is not None", "None def forward(self, x): x = self.fc(x) if self.active_unit is", "size). :param Ab: affine transform parameter column vectors (batch size", "== 2: id = np.zeros([nrOfI,2,sz[2],sz[3]],dtype=dtype) elif dim == 3: id", "in range(nrOfC): # loop over all the channels and add", "= dict() if type(ind_pars) == type(dict()): # should already be", "(np.array(lowResSize[2::]) - 1) ########################################## Adaptive Net ###################################################3 def space_normal(tensors, std=0.1):", "scalar momentum, batchxXxYxZ :param I: image, batchXxYxZ :param sz: size", "self.net_sched == 'm_f_s': input = organize_data(m,self.s,sched='depth_concat') elif self.net_sched == 'm_d_s':", "properly in the Fourier transform later!\\n\\n') return lowResSize def get_res_spacing_from_spacing(spacing,", "and now store it in a dim+1 array and rescale", "remove them') if current_dim == dim: return A else: return", "of appropriate dimension \"\"\" current_dim = len(A.shape) if current_dim >", "idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype) idnp[0,:, :] = id[0]", "np.mgrid[0:sz[0],0:sz[1],0:sz[2]] else: raise ValueError('Only dimensions 1-3 are currently supported for", "vector field of size nrOfIxnrOfCxXxYxZ \"\"\" csz = np.array(sz) #", "newspacing def get_res_size_from_size(sz, factor): \"\"\" Returns the corresponding low-res size", "Ab_inv = torch.zeros_like(Ab) for n in range(Ab.shape[0]): tm_inv = torch.inverse(Ab[n,", "1-3 are currently supported for the identity map') # #", "# fc+ relu(option) def __init__(self, in_features, out_features, 
active_unit='relu'): super(FcRel, self).__init__()", "phiR def apply_affine_transform_to_map_multiNC(Ab,phi): \"\"\"Applies an affine transform to maps (for", "elif dim == 3: # id = np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]]", "= mask*mask*mask return mask # def compute_omt_const(stds,param,dim): # omt_power =", "get_field_from_external_network: tmp = MyTensor(*(csz.tolist())).normal_(0.,1e-7) tmp.requires_grad = True else: tmp =", ":param phi: map for the warping, size dimxXxYxZ :param spacing:", "checkNan(x): \"\"\"\" input should be list of Variable \"\"\" return", "(will be overwritten with the identity transform) :return: \"\"\" dim", "return I1_warped def _compute_warped_image_multiNC_3d(I0, phi, spacing, spline_order,zero_boundary=False,use_01_input=True): if spline_order not", "Ab[4] # a_11x+a_21y+b1 phiR[1, ...] = Ab[1] * phi[0, ...]", "np.zeros([3, sz[0], sz[1], sz[2]], dtype=dtype) # idnp[0, :, :, :]", "return resampled def resample_image(I, spacing, desiredSize, spline_order=1, zero_boundary=False, identity_map=None): \"\"\"", "1, 2, and 3.') phiR = MyTensor(sz).zero_().type_as(phi) if dim ==", "functionality may not be available.') def my_hasnan(x): \"\"\"Check if any", "+ relu def __init__(self, in_channels, out_channels, kernel_size, stride=1, active_unit='relu', same_padding=False,", "from .libraries.modules.stn_nd import STN_ND_BCXYZ from .data_wrapper import AdaptVal from .data_wrapper", "spacing_ratio[0] # elif dim == 2: # idnp = np.zeros([2,", "+ Ab[1] elif dim == 2: phiR[0, ...] = Ab[0]", "torch.matmul(tm_inv, Ab[n,:,dim]) inv_affine_param = Ab_inv.transpose(1, 2).contiguous().view(Ab.shape[0], -1) return inv_affine_param def", "dim = get_dim_of_affine_transform(Ab) if dim==1: Ab.zero_() Ab[0]=1. 
elif dim==2: Ab.zero_()", "return spacing def time_warped_function(f): def __time_warped_function(input=None): start = torch.cuda.Event(enable_timing=True) end", "returns spacing of low res parameterization \"\"\" # todo: check", "nn.Conv2d(self.dim+2, self.dim, kernel_size, 1, padding=padding_size, bias=False) else: net = \\", "sz[1] desiredSizeNC = np.array([nrOfI, nrOfC] + list(desiredSize)) newspacing = spacing", ":return: returns vector field of size dimxXxYxZ \"\"\" dim =", ") if dim==1: id = id.reshape(1,sz[0]) # add a dummy", "classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_orthogonal(m):", ":, :] = fdt.dZc(I)*lam else: raise ValueError('Can only convert scalar", "elif Ab.shape[1]==12: dim = 3 if dim not in [1,", "creating a different view (effectively adding dimensions) Iw = compute_warped_image_multiNC(I0.view(torch.Size([1,", "size BxCxXxYxZ :param phi: map for the warping, size BxdimxXxYxZ", "nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride, padding=padding,bias=bias) #y = \\frac{x - mean[x]}{", "= tensor[:, :, :,-2] + tensor[:, :, :,-2] - tensor[:,", "dim (by adding dummy dimensions if necessary). 
:param A: numpy", "(effectively adding dimensions) Iw = compute_warped_image_multiNC(I0.view(torch.Size([1, 1] + list(I0.size()))), phi.view(torch.Size([1]", "idnp[0, :, :, :] = id[0] idnp[1, :, :, :]", "if sched == 'nn': warped_label_map = compute_warped_image_multiNC(label_map, phi, spacing,spline_order=0,zero_boundary=True) #", "compute_warped_image_multiNC(I, idDes, newspacing, spline_order, zero_boundary) return ID if not is_numpy", ":param mu: array indicating the mean :param sig: array indicating", "weights_init_normal(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv') != -1:", "=[a1,a2,a3], the parameter vector is simply [a1;a2;a3;b], i.e., all columns", "= np.array([nrOfI,nrOfC]+list(csz)) return Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) def centered_identity_map_multiN(sz, spacing, dtype='float32'): \"\"\" Create", "s_m def get_warped_label_map(label_map, phi, spacing, sched='nn'): if sched == 'nn':", "\"\"\"Takes a dictionary which contains key value pairs for model", "the spatial dimensions, i.e., XxYxZ :param spacing: list with spacing", "param. vector); will be overwritten with identity trans. :return: \"\"\"", "[ConvBnRel(self.dim +1, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20, self.dim, 5,", ":param pd: parameter dictionary :return: list of parameters \"\"\" pl", "# def centered_min_normalized_identity_map(sz, spacing, dtype='float32'): # \"\"\" # Returns a", "the form y=Ax+b stored in a column vector. For A", "not None: x = self.active_unit(x) return x class AdpSmoother(nn.Module): \"\"\"", "elif dim==2: Ab.zero_() Ab[0]=1. Ab[3]=1. elif dim==3: Ab.zero_() Ab[0]=1. 
Ab[4]=1.", "dim = len(mu) if dim == 1: g = np.exp(-np.power(X[0,", "= sm.smooth(mask) if pow ==2: mask = mask**2 if pow", "= (kernel_size-1)//2 if self.net_sched == 'm_only': if debugging: self.net =", "Ab.shape[1] == 12: dim = 3 if dim not in", "os.remove(abs_t_with_right_ext) # now we can do the symlink os.symlink(abs_s,abs_t_with_right_ext) def", "return model_pars def compute_vector_momentum_from_scalar_momentum_multiNC(lam, I, sz, spacing): \"\"\"Computes the vector", "res parameterization \"\"\" # todo: check that this is the", "they come from the optimizer # (i.e., list and each", "= True else: tmp = Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) return tmp def create_local_filter_weights_parameter_multiN(sz,gaussian_std_weights,", "init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_normal(m): classname = m.__class__.__name__", ":return: returns the identity map of dimension dimxXxYxZ \"\"\" dim", "is_numpy = True sz = np.array(list(I.size())) # check that the", "as IS sampler = IS.ResampleImage() low_res_image, _ = sampler.downsample_image_to_size(I, spacing,", "'m_d_s': input = organize_data(m, new_s, sched='depth_concat') elif self.net_sched == 'm_f_s_t':", "[0,0] >>> print(compute_normalized_gaussian(X, mu, sig) \"\"\" dim = len(mu) if", "init.orthogonal(m.weight.data, gain=1) elif classname.find('Linear') != -1: init.orthogonal(m.weight.data, gain=1) elif classname.find('BatchNorm2d')", "from builtins import range import torch from torch.nn.parameter import Parameter", "self.t, sched='depth_concat') elif self.net_sched == 'm_f_s_t': input = organize_data(m, self.s,", "new_var_list = var_list # return new_var_list # def recover_var_list_from_min_normalized_space(var_list,spacing,do_transform=True): #", "dim == 2: m[:, 0, :, :] = fdt.dXc(I)*lam m[:,", "dim]+list(csz)) if get_field_from_external_network: tmp = MyTensor(*(csz.tolist())).normal_(0.,1e-7) tmp.requires_grad = True else:", "implements this by creating 
a different view (effectively adding dimensions)", "==2: v = v.permute(0,2,3,1) if dim ==3: v = v.permute(0,2,3,4,1)", "nrOfIxdimxXxYxZ \"\"\" dim = len(sz) csz = np.array(sz) # just", "tensors for n in range(tensors.size()[0]): for c in range(tensors.size()[1]): dim", "weights[:, g, ...] = gaussian_std_weights[g] tmp = AdaptVal(weights) if get_preweight_from_network:", "dummy first index for d in range(dim): id[d]*=spacing[d] #id[d]*=2./(sz[d]-1) #id[d]-=1.", "image :return: returns the vector momentum \"\"\" nrOfI = sz[0]", "-= spacing[d]*((sz[d]+1)//2) # and now store it in a dim+1", "images m = create_ND_vector_field_variable_multiN(sz[2::], nrOfI) # attention that the second", "'name' and 'model_params' for par in ind_pars: model_pars[par['name']] = par['model_params']", "for the warping, size dimxXxYxZ :param spacing: image spacing [dx,dy,dz]", "if do_transform: # min_spacing = np.min(spacing) # spacing_ratio =spacing/min_spacing #", "C, i.e, 1 entry for 1D, 2 for 2D, and", "sure it is a numpy array csz = np.array([nrOfI,nr_of_mg_weights]+list(csz)) weights", "else: lowResSize[2::] = (np.ceil((np.array(sz[2:]) * np.array(factor)))).astype('int16') if lowResSize[-1] % 2", "Ab[0] + Ab[1] elif dim == 2: phiR[0, ...] =", "== 'normal': net.apply(weights_init_normal) elif init_type == 'uniform': net.apply(weights_init_uniform) elif init_type", "3: id = np.zeros([nrOfI,3,sz[2],sz[3],sz[4]],dtype=dtype) else: raise ValueError('Only dimensions 1-3 are", "# min_spacing = np.min(spacing) # spacing_ratio =min_spacing/spacing # dim =", "x class AdpSmoother(nn.Module): \"\"\" a simple conv. implementation, generate displacement", "** omt_power) # omt_const = omt_const*omt_weight_penalty/(EV.reg_factor_in_mermaid*2) # sz = [1]+", "in gaussian_std_weights] for g in range(nr_of_mg_weights): weights[:, g, ...] 
=", "[nn.Sigmoid()] self.net = nn.Sequential(*net) def prepare_data(self, m, new_s): input=None if", "spacing of low res parameterization \"\"\" #todo: check that this", "add the results m = m + compute_vector_momentum_from_scalar_momentum_multiN(lam[:, c, ...],", "absolute_import # from builtins import str # from builtins import", "if sched =='w_K_w': gaussian_std_weights = [torch.sqrt(std_w) for std_w in gaussian_std_weights]", "2: idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype) idnp[0, :, :]", "# min_spacing = np.min(spacing) # spacing_ratio = spacing/min_spacing # #", "dim == 2: id = np.zeros([nrOfI, 2, sz[2], sz[3]], dtype=dtype)", "* ((sz[2::].astype('float') - 1.) / ( desiredSizeNC[2::].astype('float') - 1.)) ###########################################", "== 'relu': self.active_unit = nn.ReLU(inplace=True) elif active_unit == 'elu': self.active_unit", "np.min(spacing) # spacing_ratio =spacing/min_spacing # dim = spacing.size # spacing_ratio_t", "resulting dictionary \"\"\" d = d1.copy() d.update(d2) return d def", "of affine parameters \"\"\" dim =0 if Ab.shape[1] == 2:", "* phi[1, ...] + Ab[6] * phi[2, ...] + Ab[9]", "Ab[3]=1. elif dim==3: Ab.zero_() Ab[0]=1. Ab[4]=1. Ab[8]=1. else: raise ValueError('Only", "returns spacing of low res parameterization \"\"\" #todo: check that", "idnp = np.zeros([1, sz[0]], dtype=dtype) idnp[0, :] = id[0] elif", ":] = fdt.dYc(I)*lam elif dim == 3: m[:, 0, :,", "use_01_input) return Iw.view(I0.size()) def compute_warped_image_multiNC(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True):", "id = np.mgrid[0:sz[0], 0:sz[1]] # elif dim == 3: #", "zero_boundary, use_bilinear=True, use_01_input=use_01_input) else: stn = SplineInterpolation_ND_BCXYZ(spacing, spline_order) I1_warped =", "other. :param Ab: parameter vector :return: dimensionality of transform (1,2,or", "elif dim == 2: id = np.mgrid[0:sz[0], 0:sz[1]] elif dim", "get_dim_of_affine_transform(Ab) if dim==1: Ab.zero_() Ab[0]=1. 
elif dim==2: Ab.zero_() Ab[0]=1. Ab[3]=1.", "par_to_name_dict[pd[key]] = key return pl, par_to_name_dict def remove_infs_from_variable(v): # 32", "centered_identity_map(sz,spacing) g = compute_normalized_gaussian(centered_id, mus, stds) tensors[n,c] = torch.from_numpy(g) def", "ValueError('Can only compute Gaussians in dimensions 1-3') def _compute_warped_image_multiNC_1d(I0, phi,", "as an input to an optimizer. :param pd: parameter dictionary", "...] + Ab[5] * phi[1, ...] + Ab[8] * phi[2,", "return torch.clamp(v, min=(np.asscalar(np.finfo('float64').min))/reduction_factor, max=(np.asscalar(np.finfo('float64').max))/reduction_factor) elif v.data.dtype == torch.HalfTensor or type(v.data)", "identity_map is not None: idDes = identity_map else: idDes =", "classname.find('Conv') != -1: init.xavier_normal(m.weight.data, gain=1) elif classname.find('Linear') != -1: init.xavier_normal(m.weight.data,", "to considering gamma=1 and beta=0 as constants. self.bn = nn.BatchNorm2d(out_channels,", "= nn.Sequential(*net) elif self.net_sched =='m_f_s': if debugging: self.net = nn.Conv2d(self.dim+1,", "be even: fix properly in the Fourier transform later!\\n\\n') return", "m = create_ND_vector_field_variable_multiN(sz[2::], nrOfI) # attention that the second dimension", "= self.conv(x) if self.bn is not None: x = self.bn(x)", "size :param I: Input image (expected to be of BxCxXxYxZ", "-1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_normal(m): classname =", "not None: x = self.active_unit(x) return x class FcRel(nn.Module): #", "Variable of given size :param sz: just the spatial sizes", "numpy array csz = np.array([nrOfI, dim]+list(csz)) if get_field_from_external_network: tmp =", "= np.min(spacing) # spacing_ratio = spacing/min_spacing # # # #", "# elif dim == 2: # id = np.mgrid[0:sz[0], 0:sz[1]]", "batch size). 
:param Ab: affine transform parameter column vectors (batch", "numpy array csz = np.array([nr_of_images, dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0., 1e-7) def", "1 if dim not in [1,2,3]: raise ValueError('Only supports dimensions", "+ tensor[:, :, -2, :] - tensor[:, :, -3, :]", "% init_type) if init_type == 'rd_normal': net.apply(weights_init_rd_normal) elif init_type ==", "batchXxYxZ :param sz: size of image :param spacing: spacing of", "dim = len(sz) - 1 if dim not in [1,2,3]:", "\"\"\"Computes inverse of affine transformation. Formally: C(Ax+b)+d = CAx+Cb+d =", "fix properly in the Fourier transform later!\\n\\n') return lowResSize def", "may not be available.') def my_hasnan(x): \"\"\"Check if any input", "elif classname.find('Linear') != -1: init.normal(m.weight.data) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data,", "the channels and add the results m = m +", "]=1 sm = get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask = sm.smooth(mask) return mask.detach() def", "- 1 if dim not in [1,2,3]: raise ValueError('Only supports", "dimensions, but not remove them') if current_dim == dim: return", "= Ab[0] * phi[0, ...] + Ab[2] * phi[1, ...]", "parameter column vector :param phi: map; format nrCxXxYxZ (nrC corresponds", "\"\"\" Returns an identity map. 
:param sz: just the spatial", "1.0, 0.02) init.constant(m.bias.data, 0.0) def init_weights(net, init_type='normal'): print('initialization method [%s]'", "Ab.shape[1] == 6: dim = 2 elif Ab.shape[1] == 12:", "the warping, size dimxXxYxZ :param spacing: image spacing [dx,dy,dz] :return:", "elif self.net_sched == 'm_f_s_t': input = organize_data(m, self.s, sched='depth_concat') input", "warped_label_map = compute_warped_image_multiNC(label_map, phi, spacing,spline_order=0,zero_boundary=True) # check if here should", "0:sz[1], 0:sz[2]] # else: # raise ValueError('Only dimensions 1-3 are", "\"\"\" if isinstance(tensors, Variable): space_normal(tensors.data, std=std) return tensors for n", "torch.FloatTensor, torch.cuda.FloatTensor # 64 - bit floating point: torch.DoubleTensor, torch.cuda.DoubleTensor", "not compute low_res_size as factor was ' + str(factor)) return", "bn=using_bn), ConvBnRel(20, self.dim, 5, active_unit=active_unit, same_padding=True, bn=using_bn)] if using_sigmoid: net", "inputs, dim, net_sched=None): # settings should include [using_bias, using bn,", "(2 * np.power(sig[0], 2.)) -np.power(X[1,:, :, :] - mu[1], 2.)", "else: stn = SplineInterpolation_ND_BCXYZ(spacing, spline_order) I1_warped = stn(I0, phi) return", "for d in range(dim): id[d] *= spacing[d] if sz[d]%2==0: #even", ":param nrOfI: number of images :return: returns vector field of", "elif dim == 3: id = np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]] else:", "# idnp[0, :, :, :] = id[0] * spacing_ratio[0] #", "= [1,1]+ list(img_sz) mask = AdaptVal(torch.ones(*mask_sz))*mask_value if dim ==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1", "spacing: spx spy spz :param desiredSize: B C X Y", "False if not isinstance(I, torch.Tensor): I = torch.Tensor(I) is_numpy =", "lowResSize): \"\"\" Computes spacing for the low-res parameterization from image", "nn.BatchNorm2d(out_channels, eps=0.0001, momentum=0, affine=True) if bn else None if active_unit", "len(A.shape) if current_dim > dim: raise 
ValueError('Can only add dimensions,", "STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=False, use_01_input=use_01_input) elif spline_order == 1: stn =", "ID.numpy(), newspacing def get_res_size_from_size(sz, factor): \"\"\" Returns the corresponding low-res", "gaussian_std s_m = sf.SmootherFactory(sz, spacing).create_smoother(s_m_params) return s_m def get_warped_label_map(label_map, phi,", "is not None: x = self.active_unit(x) return x class FcRel(nn.Module):", "__future__ import print_function from __future__ import absolute_import # from builtins", "type(ind_pars) == type(dict()): # should already be in the right", "is_numpy = False if not isinstance(I, torch.Tensor): I = torch.Tensor(I)", "-1: init.orthogonal(m.weight.data, gain=1) elif classname.find('Linear') != -1: init.orthogonal(m.weight.data, gain=1) elif", "out_channels, kernel_size, stride, padding=padding, bias=bias) else: self.conv = nn.ConvTranspose2d(in_channels, out_channels,", "np.zeros([nrOfI, 1, sz[2]], dtype=dtype) elif dim == 2: id =", ":] - mu[1], 2.) 
/ (2 * np.power(sig[1], 2.)) -np.power(X[2,:,", "get_nn_interpolation except ImportError: print('WARNING: nn_interpolation could not be imported (only", "spacing: image spacing :param sz: size of image :param lowResSize:", "of dimensions corresponding to an affine transformation of the form", "1: phiR = phi * Ab[0] + Ab[1] elif dim", "1: # id = id.reshape(1, sz[0]) # add a dummy", "should not occur in practice anyway sz = v.size() reduction_factor", "dictionary 2 :return: resulting dictionary \"\"\" d = d1.copy() d.update(d2)", "id.reshape(1, sz[0]) # add a dummy first index # #", "point: torch.FloatTensor, torch.cuda.FloatTensor # 64 - bit floating point: torch.DoubleTensor,", "spacing [dx,dy,dz] :return: returns the warped image of size XxYxZ", "len(sz) m = create_ND_vector_field_variable_multiN(sz, nrOfI) if dim == 1: m[:,", "nrOfI = sz[0] if dim == 1: id = np.zeros([nrOfI,", "todo will remove, currently fix for symmetric training if I.shape[0]", "CAx+Cb+d = x; C = inv(A), d = -Cb :param", "pl def get_parameter_list_and_par_to_name_dict_from_parameter_dict(pd): \"\"\"Same as get_parameter_list_from_parameter_dict; but also returns a", "idDes, newspacing, spline_order, zero_boundary) return ID if not is_numpy else", "0.02) init.constant(m.bias.data, 0.0) def weights_init_normal(m): classname = m.__class__.__name__ # print(classname)", "m, new_s): input=None if self.net_sched == 'm_only': input = m", "in range(nr_of_mg_weights): weights[:, g, ...] 
= gaussian_std_weights[g] tmp = AdaptVal(weights)", "array :return: True if NaNs are present, False else \"\"\"", "affine transforms to the identity (in the case of arbitrary", ":param sz: size (high-res) :param factor: low-res factor (needs to", "dim not in [1, 2, 3]: raise ValueError('Only supports dimensions", "(np.ceil((np.array(sz[2::]) * factor))).astype('int16') return low_res_sz def _compute_low_res_image(I, spacing, low_res_size, spline_order):", "2: m[:, 0, :, :] = fdt.dXc(I)*lam m[:, 1, :,", "weights_init_rd_normal(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv') != -1:", "Ab_inv[n, :, dim] = - torch.matmul(tm_inv, Ab[n,:,dim]) inv_affine_param = Ab_inv.transpose(1,", "of low re parameterization :return: returns spacing of low res", "return get_warped_label_map(I0,phi,spacing) stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=False, use_01_input=use_01_input) elif spline_order", "size \"\"\" if (factor is None) or (factor >= 1):", "only be warped in dimensions 1 to 3') def _get_low_res_spacing_from_spacing(spacing,", "anyway sz = v.size() reduction_factor = np.prod(np.array(sz)) condition = True", "parameter vector (will be overwritten with the identity transform) :return:", "...] = Ab[1] * phi[0, ...] + Ab[4] * phi[1,", "return x class AdpSmoother(nn.Module): \"\"\" a simple conv. implementation, generate", "type(v.data))) def lift_to_dimension(A, dim): \"\"\"Creates a view of A of", "for the low-res parametrization from image spacing. :param spacing: image", "factor): \"\"\"Returns the corresponding low-res size from a (high-res) sz.", "it in a dim+1 array if dim == 1: idnp", "data-type ('float32', 'float64', ...) # :return: returns the identity map", "inverse of affine transformation. 
Formally: C(Ax+b)+d = CAx+Cb+d = x;", "= np.array([nrOfI,nr_of_mg_weights]+list(csz)) weights = torch.empty(*csz) # set the default if", "list(img_sz) mask = AdaptVal(torch.ones(*mask_sz))*mask_value if dim ==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1 elif dim==3:", "mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range ]=1 sm = get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask = sm.smooth(mask) if pow", "dim ==2: v = v.permute(0,2,3,1) if dim ==3: v =", "res size \"\"\" if (factor is None): print('WARNING: Could not", "weights_init_xavier(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv') != -1:", "using elu] # inputs should be a dictionary could contain", "1, padding=padding_size, bias=False) else: net = \\ [ConvBnRel(self.dim + 2,", "== 'm_d_s': if debugging: self.net = nn.Conv2d(self.dim+1, self.dim, kernel_size, 1,", "# now we can do the symlink os.symlink(abs_s,abs_t_with_right_ext) def combine_dict(d1,d2):", "var is not None else None for var in var_list]", "sz[2::], spacing) return m def compute_vector_momentum_from_scalar_momentum_multiN(lam, I, nrOfI, sz, spacing):", "for d in range(dim): # id[d] *= spacing[d] # if", "np.power(X[1,:, :] - mu[1], 2.) / (2 * np.power(sig[1], 2.)))", "id[2] else: raise ValueError('Only dimensions 1-3 are currently supported for", "net += [nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched == 'm_d_s_f_t':", "a more meaningful way. 
\"\"\" from __future__ import print_function from", "-1: init.normal(m.weight.data) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data,", "input = torch.cat((moving.unsqueeze(0),target.unsqueeze(0)),dim=0) elif sched == 'difference': input = moving-target", "was ' + str(factor)) return sz else: lowResSize = np.array(sz)", "+1, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20, self.dim, 5, active_unit=active_unit,", "# # def tranfrom_var_list_into_min_normalized_space(var_list,spacing,do_transform=True): # if do_transform: # min_spacing =", "active_unit='relu', same_padding=False, bn=False, reverse=False, bias=False): super(ConvBnRel, self).__init__() padding = int((kernel_size", "* (np.array(sz[2::])-1) / (np.array(lowResSize[2::])-1) def _get_low_res_size_from_size(sz, factor): \"\"\"Returns the corresponding", "field torch Variable of given size. :param sz: just the", "a torch array and returns it as a numpy array", "for n in range(tensors.size()[0]): for c in range(tensors.size()[1]): dim =", "now get it into range [0,(sz-1)*spacing]^d # id = np.array(id.astype(dtype))", "3: g = np.exp(-np.power(X[0,:, :, :] - mu[0], 2.) /", "id[0] * spacing_ratio[0] # idnp[1, :, :] = id[1] *", "mu, sig): \"\"\"Computes a normalized Gaussian. :param X: map with", "MyTensor from . 
import smoother_factory as sf from .data_wrapper import", "\"\"\" dim = len(sz) if dim==1: id = np.mgrid[0:sz[0]] elif", "spacing).create_smoother(s_m_params) return s_m def get_warped_label_map(label_map, phi, spacing, sched='nn'): if sched", "value pairs for model parameters and converts it into a", "init_weights(net, init_type='normal'): print('initialization method [%s]' % init_type) if init_type ==", "get_parameter_list_from_parameter_dict(pd): \"\"\"Takes a dictionary which contains key value pairs for", "\"\"\" nrOfI = sz[0] # number of images m =", "\\ [ConvBnRel(self.dim + 2, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20,", "=0.05,pow=2): \"\"\"generate a smooth weight mask for the omt \"\"\"", "Iw.view(I0.size()) def compute_warped_image_multiNC(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True): \"\"\"Warps image.", ".data_wrapper import MyTensor from . import smoother_factory as sf from", "dim==2: Ab.zero_() Ab[0]=1. Ab[3]=1. elif dim==3: Ab.zero_() Ab[0]=1. Ab[4]=1. Ab[8]=1.", "gaussian_std_weights = [torch.sqrt(std_w) for std_w in gaussian_std_weights] for g in", "v: torch array :return: numpy array \"\"\" dim = len(v.shape)-2", "for resampling ID = compute_warped_image_multiNC(I, idDes, newspacing, spline_order, zero_boundary) return", "np.zeros([nrOfI, 2, sz[2], sz[3]], dtype=dtype) elif dim == 3: id", "a column vector. For A =[a1,a2,a3], the parameter vector is", "momentum: :math:`m=\\\\lambda\\\\nabla I`. 
:param lam: scalar momentum, BxCxXxYxZ :param I:", "dim == 3: g = np.exp(-np.power(X[0,:, :, :] - mu[0],", "0: lowResSize[-1] -= 1 print( '\\n\\nWARNING: forcing last dimension to", "if active_unit == 'relu': self.active_unit = nn.ReLU(inplace=True) elif active_unit ==", "= sz[0] for nrI in range(nr_of_images): set_affine_transform_to_identity(Ab[nrI, :]) def get_inverse_affine_param(Ab):", "0 if not reverse: self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride,", "-3] def get_resampled_image(I, spacing, desiredSize, spline_order=1, zero_boundary=False, identity_map=None): \"\"\" :param", ", kernel_size=5): # return the self.net and self.net_input padding_size =", "vector field torch Variable of given size. :param sz: just", "classname.find('Linear') != -1: init.kaiming_normal(m.weight.data, a=0, mode='fan_in') elif classname.find('BatchNorm2d') != -1:", ":,:, -1,:, :] = tensor[:, :, -2, :] + tensor[:,", ":,:, :, -1, :] = tensor[:, :, :, -2] +", "identity_map = identity_map[:n_batch] resampled, new_spacing = resample_image(I, spacing, desiredSize, spline_order=spline_order,", "elif dim == 3: return _compute_warped_image_multiNC_3d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) else:", "is None: img_sz = I.shape[2:] spacing = 1. / (np.array(img_sz)", "floating point: torch.DoubleTensor, torch.cuda.DoubleTensor # 16 - bit floating point:", "'xavier': net.apply(weights_init_xavier) elif init_type == 'kaiming': net.apply(weights_init_kaiming) elif init_type ==", "d1: dictionary 1 :param d2: dictionary 2 :return: resulting dictionary", "practice anyway sz = v.size() reduction_factor = np.prod(np.array(sz)) condition =", "the scalar momentum: :math:`m=\\\\lambda\\\\nabla I`. 
:param lam: scalar momentum, batchxXxYxZ", "= id.reshape(1, sz[0]) # add a dummy first index #", "parameterization \"\"\" #todo: check that this is the correct way", "return output return __time_warped_function def interoplate_boundary_right(tensor): dim = len(tensor.shape)-2 if", "sched='depth_concat') input = organize_data(input, self.t, sched='depth_concat') return input def forward(self,", "if init_type == 'rd_normal': net.apply(weights_init_rd_normal) elif init_type == 'normal': net.apply(weights_init_normal)", "( desiredSizeNC[2::].astype('float') - 1.)) ########################################### if identity_map is not None:", "images :return: returns vector field of size nrOfIxdimxXxYxZ \"\"\" dim", "= m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: init.uniform(m.weight.data, 0.038,", ":return: resulting dictionary \"\"\" d = d1.copy() d.update(d2) return d", "[1,2,3]: raise ValueError('Only supports dimensions 1, 2, and 3.') phiR", "mask # def compute_omt_const(stds,param,dim): # omt_power = param['forward_model']['smoother']['omt_power'] # omt_weight_penalty", "== 2: phiR[0, ...] = Ab[0] * phi[0, ...] +", "nrOfC nrOfC = sz[1] for c in range(nrOfC): # loop", "for the different dimensions :return: Normalized Gaussian evaluated at coordinates", "return MyTensor(*(csz.tolist())).normal_(0.,1e-7) def create_vector_parameter(nr_of_elements): \"\"\"Creates a vector parameters with a", "nr_of_images = Ab.size()[0] if nr_of_images != sz[0]: raise ValueError('Incompatible number", "elif dim == 2: # id = np.mgrid[0:sz[0], 0:sz[1]] #", "\"nn interpolation is not precise\" else: raise ValueError(\" the label", "\"\"\"Various utility functions. .. 
todo:: Reorganize this package in a", "= gaussian_std_weights[g] tmp = AdaptVal(weights) if get_preweight_from_network: tmp.requires_grad = True", "== 1: return float(v) def checkNan(x): \"\"\"\" input should be", "loop over all the channels and add the results m", "# print(classname) if classname.find('Conv') != -1: init.kaiming_normal(m.weight.data, a=0, mode='fan_in') elif", "for the omt \"\"\" dim = len(img_sz) mask_sz = [1,1]+", "by 0.5*spacing # # :param sz: just the spatial dimensions,", "print('WARNING: What should the spacing be here? Needed for new", "else: tmp = Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) return tmp def create_local_filter_weights_parameter_multiN(sz,gaussian_std_weights, nrOfI=1,sched='w_K_w',get_preweight_from_network=False): \"\"\"", "dim == 2: phiR[0, ...] = Ab[0] * phi[0, ...]", "corresponding low-res size from a (high-res) sz. :param sz: size", "# return idnp # # def tranfrom_var_list_into_min_normalized_space(var_list,spacing,do_transform=True): # if do_transform:", "\"\"\" if spacing is None: img_sz = I.shape[2:] spacing =", "+ bn (optional) + relu def __init__(self, in_channels, out_channels, kernel_size,", "1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_kaiming(m): classname = m.__class__.__name__ #", "functions. .. todo:: Reorganize this package in a more meaningful", "2.))) g = g/g.sum() return g elif dim == 3:", "desiredSize: B C X Y Z :param spline_order: :param zero_boundary:", "std * np.ones(dim) print('WARNING: What should the spacing be here?", "== 0: stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=False, use_01_input=use_01_input) elif spline_order", "map') for n in range(nrOfI): id[n, ...] 
= centered_identity_map(sz[2::], spacing,dtype=dtype)", "and 3.') return phiR def apply_affine_transform_to_map_multiNC(Ab,phi): \"\"\"Applies an affine transform", "interoplate_boundary_right(tensor): dim = len(tensor.shape)-2 if dim==1: tensor[:,:,-1]= tensor[:,:-2]+ tensor[:,:-2]-tensor[:,:-3] if", "dim == 3: m[:, 0, :, :, :] = fdt.dXc(I)*lam", "in BxCxXxYxZ format :param spacing: list with spacing information [sx,sy,sz]", "spacing, desiredSize, spline_order=1, zero_boundary=False, identity_map=None): \"\"\" Resample an image to", "= 1 elif Ab.shape[1]==6: dim = 2 elif Ab.shape[1]==12: dim", "= len(gaussian_std_weights) csz = np.array(sz) # just to make sure", "= CAx+Cb+d :param Ab: B x pars (batch size x", "raise ValueError('Unknown data type: ' + str( type(v.data))) def lift_to_dimension(A,", "else: os.remove(abs_t_with_right_ext) # now we can do the symlink os.symlink(abs_s,abs_t_with_right_ext)", ":, :] = fdt.dYc(I)*lam m[:, 2, :, :, :] =", "Ab: Parameter vectors B x pars (batch size x param.", "v.data.dtype == torch.DoubleTensor or type(v.data) == torch.cuda.DoubleTensor: return torch.clamp(v, min=(np.asscalar(np.finfo('float64').min))/reduction_factor,", "view of A of appropriate dimension \"\"\" current_dim = len(A.shape)", "to finish running torch.cuda.synchronize() print(start.elapsed_time(end)) return output return __time_warped_function def", "np.ones(dim) print('WARNING: What should the spacing be here? 
Needed for", "print((torch.sum(gi[0].data), torch.sum(gi[1].data))) print(\"Grad Output\") print(torch.sum(go[0].data)) return gi[0], gi[1], gi[2] class", ":] = id[2] else: raise ValueError('Only dimensions 1-3 are currently", "Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) return tmp def create_local_filter_weights_parameter_multiN(sz,gaussian_std_weights, nrOfI=1,sched='w_K_w',get_preweight_from_network=False): \"\"\" Create vector field", "= np.array(sz) # just to make sure it is a", "torch.HalfTensor or type(v.data) == torch.cuda.HalfTensor: return torch.clamp(v, min=(np.asscalar(np.finfo('float16').min))/reduction_factor, max=(np.asscalar(np.finfo('float16').max))/reduction_factor) else:", "dim == 2: # idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype)", "- tensor[:, :, -3, :] tensor[:, :,:, :, -1, :]", "dimension to be even: fix properly in the Fourier transform", "# print(classname) if classname.find('Conv') != -1: init.normal(m.weight.data) elif classname.find('Linear') !=", "Input image (expected to be of BxCxXxYxZ format) :param spacing:", "the affine transformation as given by the column vector Ab", "input = organize_data(m,self.s,sched='depth_concat') elif self.net_sched == 'm_d_s': input = organize_data(m,", ":return: low res size \"\"\" if (factor is None) or", "\"\"\"Creates a vector parameters with a specified number of elements.", "input = organize_data(m, new_s, sched='depth_concat') input = organize_data(input, self.t, sched='depth_concat')", "v = v.permute(0,2,3,4,1) return v def get_scalar(v): if isinstance(v, float):", "check that the batch size and the number of channels", "net.apply(weights_init_normal) elif init_type == 'uniform': net.apply(weights_init_uniform) elif init_type == 'xavier':", "[1]+ [len(stds)] +[1]*(dim+1) # return omt_const.view(*sz) def get_single_gaussian_smoother(gaussian_std,sz,spacing): s_m_params =", "\\ [ConvBnRel(self.dim + 1, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20,", 
"init.normal(m.weight.data) elif classname.find('Linear') != -1: init.normal(m.weight.data) elif classname.find('BatchNorm2d') != -1:", "this is the correct way of doing it return spacing", "def weights_init_normal(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv') !=", "the spatial spacing :param desiredSize: array for the desired size", "lowResSize[-1] -= 1 print( '\\n\\nWARNING: forcing last dimension to be", "end = torch.cuda.Event(enable_timing=True) start.record() output = f(input) end.record() # Waits", "from a (high-res) sz. :param sz: size (high-res) :param factor:", "normalized Gaussian. :param X: map with coordinates at which to", "sure that subsequent sums work (hence will be smaller than", "/ (2 * np.power(sig[2], 2.))) g = g / g.sum()", "as given by the column vector Ab to the identity", "downsampling \"\"\" desiredSize = desiredSize[2:] is_numpy = False if not", "return the self.net and self.net_input padding_size = (kernel_size-1)//2 if self.net_sched", "low_res_image def individual_parameters_to_model_parameters(ind_pars): model_pars = dict() if type(ind_pars) == type(dict()):", "from torch.nn.parameter import Parameter from torch.autograd import Variable from .libraries.modules.stn_nd", "omt \"\"\" dim = len(img_sz) mask_sz = [1,1]+ list(img_sz) mask", "== 'elu': self.active_unit = nn.ELU(inplace=True) else: self.active_unit = None def", "from the scalar momentum: :math:`m=\\\\lambda\\\\nabla I`. 
:param lam: scalar momentum,", "for the identity map') return idnp def omt_boundary_weight_mask(img_sz,spacing,mask_range=5,mask_value=5,smoother_std =0.05): \"\"\"generate", "should be a dictionary could contain ['s'],['t'] super(AdpSmoother, self).__init__() self.dim", "= Parameter(tmp) return tmp def create_ND_scalar_field_parameter_multiNC(sz, nrOfI=1, nrOfC=1): \"\"\" Create", "-2] + tensor[:, :, :, -2] - tensor[:, :, :,", "== 0: # return get_warped_label_map(I0,phi,spacing) stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=False,", "== 1: m[:, 0, :] = fdt.dXc(I)*lam elif dim ==", "* spacing_ratio[0] # idnp[1, :, :, :] = id[1] *", "new_var_list = var_list # return new_var_list # def identity_map(sz,spacing,dtype='float32'): \"\"\"", "= len(v.shape)-2 if dim ==2: v = v.permute(0,2,3,1) if dim", "nrOfI = sz[0] nrOfC = sz[1] desiredSizeNC = np.array([nrOfI, nrOfC]", "dim = 1 elif Ab.shape[1] == 6: dim = 2", "= Ab[2] * phi[0, ...] + Ab[5] * phi[1, ...]", "if dim == 1: m[:, 0, :] = fdt.dXc(I)*lam elif", "in 3D) :param nrOfI: number of images :param nrOfC: number", "if dim == 1: id = id.reshape(1, sz[0]) # add", "dim==3: id = np.mgrid[0:sz[0],0:sz[1],0:sz[2]] else: raise ValueError('Only dimensions 1-3 are", "= self.bn(x) if self.active_unit is not None: x = self.active_unit(x)", "# spacing_ratio =spacing/min_spacing # dim = spacing.size # spacing_ratio_t =", "= id[2] else: raise ValueError('Only dimensions 1-3 are currently supported", "# set the default if sched =='w_K_w': gaussian_std_weights = [torch.sqrt(std_w)", "dimensions, i.e., XxYxZ :param spacing: list with spacing information [sx,sy,sz]", "columns stacked on top of each other. :param Ab: parameter", "return id def identity_map_multiN(sz,spacing,dtype='float32'): \"\"\" Create an identity map :param", "index for d in range(dim): id[d]*=spacing[d] #id[d]*=2./(sz[d]-1) #id[d]-=1. 
# and", "/ (np.array(img_sz) - 1) if identity_map is not None: #", "in a dim+1 array and rescale by the ratio #", "[%s]' % init_type) if init_type == 'rd_normal': net.apply(weights_init_rd_normal) elif init_type", "AdpSmoother(nn.Module): \"\"\" a simple conv. implementation, generate displacement field \"\"\"", "self.net_sched == 'm_only': if debugging: self.net = nn.Conv2d(2, 2, kernel_size,", "!= -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def init_weights(net, init_type='normal'):", "1, :, :, :] = fdt.dYc(I)*lam m[:, 2, :, :,", "id = np.zeros([nrOfI,3,sz[2],sz[3],sz[4]],dtype=dtype) else: raise ValueError('Only dimensions 1-3 are currently", "as get_parameter_list_from_parameter_dict; but also returns a dictionary which keeps track", "np.array([nr_of_images, dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0., 1e-7) def create_ND_vector_field_variable(sz): \"\"\"Create vector field", "ConvBnRel(20,self.dim, 5, active_unit=active_unit, same_padding=True, bn=using_bn)] if using_sigmoid: net += [nn.Sigmoid()]", "dim = spacing.size # spacing_ratio_t = AdaptVal(torch.Tensor(spacing_ratio)) # sp_sz =", "* phi[0, ...] + Ab[4] * phi[1, ...] + Ab[7]", "centered_min_normalized_identity_map(sz, spacing, dtype='float32'): # \"\"\" # Returns a centered identity", "sampler.downsample_image_to_size(I, spacing, low_res_size[2::],spline_order) return low_res_image def individual_parameters_to_model_parameters(ind_pars): model_pars = dict()", "+ Ab[3] * phi[1, ...] + Ab[6] * phi[2, ...]", "- bit floating point: torch.DoubleTensor, torch.cuda.DoubleTensor # 16 - bit", "# number of images m = create_ND_vector_field_variable_multiN(sz[2::], nrOfI) # attention", ":param v: torch array :return: numpy array \"\"\" return (v.detach()).cpu().numpy()", "Ab[0] * phi[0, ...] + Ab[3] * phi[1, ...] 
+", "2, and 3.') Ab = Ab.view(Ab.shape[0], dim+1, dim).transpose(1,2) Ab_inv =", ":,-2,:] + tensor[:, :,-2,:] - tensor[:, :,-3,:] tensor[:, :, :,-1]", "id.astype(dtype) ) if dim==1: id = id.reshape(1,sz[0]) # add a", "\"\"\" # Returns a centered identity map (with 0 in", "of size BxCxXxYxZ \"\"\" dim = I0.dim()-2 if dim ==", "1, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20, self.dim, 5, active_unit=active_unit,", "i.e, 1 entry for 1D, 2 for 2D, and 3", "bit floating point: torch.HalfTensor, torch.cuda.HalfTensor # todo: maybe find a", "check the spacing here before running this code') spacing =", "None: x = self.active_unit(x) return x class FcRel(nn.Module): # fc+", "\"\"\" dim = len(sz) csz = np.array(sz) # just to", "set_affine_transform_to_identity(Ab[nrI, :]) def get_inverse_affine_param(Ab): \"\"\"Computes inverse of affine transformation. Formally:", "updated_param def apply_affine_transform_to_map(Ab,phi): \"\"\"Applies an affine transform to a map.", "else: idDes = AdaptVal(torch.from_numpy(identity_map_multiN(desiredSizeNC, newspacing))) # now use this map", "[1,1]+ list(img_sz) mask = AdaptVal(torch.zeros(*mask_sz)) if dim ==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1 elif", "pow ==3: mask = mask*mask*mask return mask # def compute_omt_const(stds,param,dim):", "affine transform parameter column vector :param phi: map; format nrCxXxYxZ", "in CUDA at the moment). ' 'Some functionality may not", "[len(stds)] +[1]*(dim+1) # return omt_const.view(*sz) def get_single_gaussian_smoother(gaussian_std,sz,spacing): s_m_params = pars.ParameterDict()", "desiredSize[0] = n_batch identity_map = identity_map[:n_batch] resampled, new_spacing = resample_image(I,", "spacing * (np.array(sz[2::]) - 1) / (np.array(lowResSize[2::]) - 1) ##########################################", "import str # from builtins import range import torch from", "numpy data-type ('float32', 'float64', ...) 
:return: returns the identity map", "coordinates in X Example:: >>> mu, sig = [1,1], [1,1]", "dim == 1: # id = id.reshape(1, sz[0]) # add", "== 3: idnp = np.zeros([3, sz[0], sz[1], sz[2]], dtype=dtype) idnp[0,", "nn import torch.nn.init as init from . import module_parameters as", "spacing_ratio =min_spacing/spacing # dim = spacing.size # spacing_ratio_t = AdaptVal(torch.Tensor(spacing_ratio))", "is to make sure that subsequent sums work (hence will", "now get it into range [0,(sz-1)*spacing]^d id = np.array( id.astype(dtype)", "symlink os.symlink(abs_s,abs_t_with_right_ext) def combine_dict(d1,d2): \"\"\"Creates a dictionary which has entries", "parameterization :return: returns spacing of low res parameterization \"\"\" #todo:", "12: dim = 3 if dim not in [1, 2,", "currently fix for symmetric training if I.shape[0] != identity_map.shape[0]: n_batch", "else: self.active_unit = None def forward(self, x): x = self.fc(x)", "= AdaptVal(torch.from_numpy(identity_map_multiN(desiredSizeNC, newspacing))) # now use this map for resampling", "return low_res_image def individual_parameters_to_model_parameters(ind_pars): model_pars = dict() if type(ind_pars) ==", "# check if here should be add assert assert abs(torch.sum(warped_label_map.data", "lowResSize[2::] = (np.ceil((np.array(sz[2:]) * factor))).astype('int16') else: lowResSize[2::] = (np.ceil((np.array(sz[2:]) *", "dim: raise ValueError('Can only add dimensions, but not remove them')", "displacement field \"\"\" def __init__(self, inputs, dim, net_sched=None): # settings", "sched='depth_concat') elif self.net_sched == 'm_f_s_t': input = organize_data(m, self.s, sched='depth_concat')", "0 if Ab.shape[1]==2: dim = 1 elif Ab.shape[1]==6: dim =", "if bn else None if active_unit == 'relu': self.active_unit =", "# # :param sz: just the spatial dimensions, i.e., XxYxZ", "elif classname.find('Linear') != -1: space_normal(m.weight.data) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data,", "overwritten with 
the identity transform) :return: \"\"\" dim = get_dim_of_affine_transform(Ab)", "if NaNs are present, False else \"\"\" return (x !=", "dim+1, dim).transpose(1, 2) Cd = Cd.view(Cd.shape[0], dim+1, dim).transpose(1, 2) updated_param", "return m def create_ND_vector_field_variable_multiN(sz, nr_of_images=1): \"\"\" Create vector field torch", "I, nrOfI, sz, spacing): \"\"\"Computes the vector momentum from the", "def get_res_size_from_size(sz, factor): \"\"\" Returns the corresponding low-res size from", "csz = np.array([nr_of_images, dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0., 1e-7) def create_ND_vector_field_variable(sz): \"\"\"Create", "self.net = nn.Conv2d(2, 2, kernel_size, 1, padding=padding_size, bias=False,groups=2) else: net", "is image dim, not nrOfC nrOfC = sz[1] for c", "== 1: id = np.zeros([nrOfI,1,sz[2]],dtype=dtype) elif dim == 2: id", "self.net = nn.Sequential(*net) elif self.net_sched == 'm_d_s': if debugging: self.net", "identity_map=None): \"\"\" :param I: B C X Y Z :param", "dim=1) elif sched == 'width_concat': input = torch.cat((moving, target), dim=3)", "sched='nn'): if sched == 'nn': warped_label_map = compute_warped_image_multiNC(label_map, phi, spacing,spline_order=0,zero_boundary=True)", "necessary). :param A: numpy array :param dim: desired dimension of", "def forward(self, m,new_s=None): m = m * self.mask input =", "elif Ab.shape[1] == 12: dim = 3 if dim not", "spx spy spz :param desiredSize: B C X Y Z", "dimension dim (by adding dummy dimensions if necessary). 
:param A:", "self.bn is not None: x = self.bn(x) if self.active_unit is", "=='w_K_w': gaussian_std_weights = [torch.sqrt(std_w) for std_w in gaussian_std_weights] for g", "even: fix properly in the Fourier transform later!\\n\\n') return lowResSize", "is a numpy array csz = np.array([dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0.,1e-7) def", "method is not implemented\") return warped_label_map def t2np(v): \"\"\" Takes", "0.02) init.constant(m.bias.data, 0.0) def weights_init_rd_normal(m): classname = m.__class__.__name__ # print(classname)", "# \"\"\" # Returns a centered identity map (with 0", "transform parameter column vector :param phi: map; format nrCxXxYxZ (nrC", "6: dim = 2 elif Ab.shape[1] == 12: dim =", "['s'],['t'] super(AdpSmoother, self).__init__() self.dim = dim self.net_sched = 'm_only' self.s", "nn.Conv2d(2, 2, kernel_size, 1, padding=padding_size, bias=False,groups=2) else: net = \\", "3) \"\"\" nr = len(Ab) if nr==2: return 1 elif", "dim == 3: phiR[0, ...] = Ab[0] * phi[0, ...]", "-3] tensor[:, :,:, :, :, -1] = tensor[:, :, :,", "stride, padding=padding, bias=bias) else: self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride,", "= compute_warped_image_multiNC(I, idDes, newspacing, spline_order, zero_boundary) return ID if not", "tmp = AdaptVal(weights) if get_preweight_from_network: tmp.requires_grad = True else: tmp", "idnp[2, :, :, :] = id[2] else: raise ValueError('Only dimensions", "def get_warped_label_map(label_map, phi, spacing, sched='nn'): if sched == 'nn': warped_label_map", "the identity map \"\"\" dim = len(sz)-2 nrOfI = int(sz[0])", "adding dummy dimensions if necessary). :param A: numpy array :param", "None: img_sz = I.shape[2:] spacing = 1. 
/ (np.array(img_sz) -", "tensors[n,c] = torch.from_numpy(g) def weights_init_uniform(m): classname = m.__class__.__name__ # print(classname)", "\"\"\" dim = len(img_sz) mask_sz = [1,1]+ list(img_sz) mask =", "= np.mgrid[0:sz[0], 0:sz[1]] elif dim == 3: id = np.mgrid[0:sz[0],", "tensor[:,:-2]+ tensor[:,:-2]-tensor[:,:-3] if dim==2: tensor[:, :, -1,:] = tensor[:, :,-2,:]", "i.e., XxYxZ # :param spacing: list with spacing information [sx,sy,sz]", "input=None if self.net_sched == 'm_only': input = m elif self.net_sched", "...] + Ab[5] # a_12x+a_22y+b2 elif dim == 3: phiR[0,", "#odd id[d] -= spacing[d]*((sz[d]+1)//2) # and now store it in", "add a dummy first index for d in range(dim): id[d]*=spacing[d]", "identity_map=identity_map) return resampled def resample_image(I, spacing, desiredSize, spline_order=1, zero_boundary=False, identity_map=None):", "dictionary could contain ['s'],['t'] super(AdpSmoother, self).__init__() self.dim = dim self.net_sched", "vector momentum \"\"\" fdt = fd.FD_torch(spacing) dim = len(sz) m", "if sz[d]%2==0: # #even # id[d] -= spacing[d]*(sz[d]//2) # else:", "nrCxXxYxZ (nrC corresponds to dimension) :return: returns transformed map \"\"\"", "sz[d]%2==0: #even id[d] -= spacing[d]*(sz[d]//2) else: #odd id[d] -= spacing[d]*((sz[d]+1)//2)", "later!\\n\\n') return lowResSize def get_res_spacing_from_spacing(spacing, sz, lowResSize): \"\"\" Computes spacing", "for the centered identity map') return idnp # # def", "[1,1], [1,1] >>> X = [0,0] >>> print(compute_normalized_gaussian(X, mu, sig)", "zero_boundary=zero_boundary, identity_map=identity_map) return resampled def resample_image(I, spacing, desiredSize, spline_order=1, zero_boundary=False,", "s_m = sf.SmootherFactory(sz, spacing).create_smoother(s_m_params) return s_m def get_warped_label_map(label_map, phi, spacing,", "...] = Ab[0] * phi[0, ...] 
+ Ab[2] * phi[1,", "m.__class__.__name__ print(classname) if classname.find('Conv') != -1: init.orthogonal(m.weight.data, gain=1) elif classname.find('Linear')", "are currently supported for the identity map') return idnp def", "desiredSize, spline_order=spline_order, zero_boundary=zero_boundary, identity_map=identity_map) return resampled def resample_image(I, spacing, desiredSize,", "stride=1, active_unit='relu', same_padding=False, bn=False, reverse=False, bias=False): super(ConvBnRel, self).__init__() padding =", "is_numpy else ID.numpy(), newspacing def get_res_size_from_size(sz, factor): \"\"\" Returns the", "be warped in dimensions 1 to 3') def _get_low_res_spacing_from_spacing(spacing, sz,", "'m_f_s_t': if debugging: self.net = nn.Conv2d(self.dim+2, self.dim, kernel_size, 1, padding=padding_size,", "dtype=dtype) idnp[0, :, :, :] = id[0] idnp[1, :, :,", "self.t, sched='depth_concat') return input def forward(self, m,new_s=None): m = m", "= [] for key in pd: pl.append(pd[key]) par_to_name_dict[pd[key]] = key", "0.5*spacing :param sz: just the spatial dimensions, i.e., XxYxZ :param", "\"\"\"Warps image. :param I0: image to warp, image size BxCxXxYxZ", "from .spline_interpolation import SplineInterpolation_ND_BCXYZ import os try: from .libraries.functions.nn_interpolation import", "False else \"\"\" return (x != x).any() def create_symlink_with_correct_ext(sf, tf):", "\"\"\"Warps image. 
:param I0: image to warp, image size XxYxZ", "phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif dim == 3: return _compute_warped_image_multiNC_3d(I0, phi,", "= len(sz) # if dim == 1: # id =", "Waits for everything to finish running torch.cuda.synchronize() print(start.elapsed_time(end)) return output", "vector) :return: Inverse of affine parameters \"\"\" dim =0 if", "Y Z :param spline_order: :param zero_boundary: :param identity_map: :return: \"\"\"", "np.mgrid[0:sz[0], 0:sz[1]] # elif dim == 3: # id =", "1) // 2) if same_padding else 0 if not reverse:", "same_padding else 0 if not reverse: self.conv = nn.Conv2d(in_channels, out_channels,", "transform to a map. :param Ab: affine transform parameter column", "len(img_sz) mask_sz = [1,1]+ list(img_sz) mask = AdaptVal(torch.ones(*mask_sz))*mask_value if dim", "low res parameterization \"\"\" #todo: check that this is the", "self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding=padding, bias=bias) else: self.conv", ":return: dimensionality of transform (1,2,or 3) \"\"\" nr = len(Ab)", "if dim == 1: phiR = phi * Ab[0] +", "Computes spacing for the low-res parameterization from image spacing :param", "\"\"\" d = d1.copy() d.update(d2) return d def get_parameter_list_from_parameter_dict(pd): \"\"\"Takes", "phi, spacing, spline_order,zero_boundary=False,use_01_input=True): if spline_order not in [0, 1, 2,", "range [0,(sz-1)*spacing]^d id = np.array(id.astype(dtype)) if dim == 1: id", "def weights_init_kaiming(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv') !=", "dtype=dtype) # idnp[0, :] = id[0] * spacing_ratio[0] # elif", "both of them. :param d1: dictionary 1 :param d2: dictionary", "to dimension) :return: returns transformed maps \"\"\" sz = phi.size()", ">>> mu, sig = [1,1], [1,1] >>> X = [0,0]", "for var in var_list] # else: # new_var_list = var_list", "transform parameter column vectors (batch size x param. 
vector) :param", "correct way of doing it return spacing * (np.array(sz[2::])-1) /", "None: x = self.bn(x) if self.active_unit is not None: x", "1-3 are currently supported for the identity map') # now", "# # min_spacing = np.min(spacing) # spacing_ratio = spacing/min_spacing #", "+ compute_vector_momentum_from_scalar_momentum_multiN(lam[:, c, ...], I[:, c, ...], nrOfI, sz[2::], spacing)", "data-type ('float32', 'float64', ...) :return: returns the identity map \"\"\"", "= \\ [ConvBnRel(self.dim +1, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20,", "def weights_init_xavier(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv') !=", "def remove_infs_from_variable(v): # 32 - bit floating point: torch.FloatTensor, torch.cuda.FloatTensor", "dimensions 1, 2, and 3.') return phiR def apply_affine_transform_to_map_multiNC(Ab,phi): \"\"\"Applies", "attention that the second dimension here is image dim, not", "omt_const = omt_const*omt_weight_penalty/(EV.reg_factor_in_mermaid*2) # sz = [1]+ [len(stds)] +[1]*(dim+1) #", "vector momentum in dimensions 1-3') return m def create_ND_vector_field_variable_multiN(sz, nr_of_images=1):", "== 12: dim = 3 if dim not in [1,", "or v.data.dtype==torch.float32: return torch.clamp(v, min=(np.asscalar(np.finfo('float32').min))/reduction_factor, max=(np.asscalar(np.finfo('float32').max))/reduction_factor) elif v.data.dtype == torch.DoubleTensor", "has entries from both of them. 
:param d1: dictionary 1", "== 1: idnp = np.zeros([1, sz[0]], dtype=dtype) idnp[0, :] =", "I.shape[0] desiredSize = desiredSize.copy() desiredSize[0] = n_batch identity_map = identity_map[:n_batch]", "now we can do the symlink os.symlink(abs_s,abs_t_with_right_ext) def combine_dict(d1,d2): \"\"\"Creates", "2 elif Ab.shape[1]==12: dim = 3 if dim not in", "dim == 2: idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype) idnp[0,", "mask = sm.smooth(mask) return mask.detach() def momentum_boundary_weight_mask(img_sz,spacing,mask_range=5,smoother_std =0.05,pow=2): \"\"\"generate a", "dimension) :return: returns transformed maps \"\"\" sz = phi.size() dim", "= sz[0] # number of images m = create_ND_vector_field_variable_multiN(sz[2::], nrOfI)", "Ab[5] * phi[1, ...] + Ab[8] * phi[2, ...] +", "\"\"\"Computes a normalized Gaussian. :param X: map with coordinates at", "import print_function from __future__ import absolute_import # from builtins import", "elif dim==3: Ab.zero_() Ab[0]=1. Ab[4]=1. Ab[8]=1. 
else: raise ValueError('Only supports", ":param spacing: list with spacing information [sx,sy,sz] # :param dtype:", "present, False else \"\"\" return (x != x).any() def create_symlink_with_correct_ext(sf,", "# if ind_pars is not a dictionary assume that they", "low_res_size as factor was ' + str(factor)) return sz else:", "kernel_size, 1, padding=padding_size, bias=False) else: net = \\ [ConvBnRel(self.dim +1,", "Ab_inv.transpose(1, 2).contiguous().view(Ab.shape[0], -1) return inv_affine_param def update_affine_param(Ab, Cd): \"\"\"Update affine", "class ConvBnRel(nn.Module): # conv + bn (optional) + relu def", "same_padding=True, bn=using_bn), ConvBnRel(20,self.dim, 5, active_unit=active_unit, same_padding=True, bn=using_bn)] if using_sigmoid: net", "id def identity_map_multiN(sz,spacing,dtype='float32'): \"\"\" Create an identity map :param sz:", "def __init__(self, in_channels, out_channels, kernel_size, stride=1, active_unit='relu', same_padding=False, bn=False, reverse=False,", "2 != 0: lowResSize[-1] -= 1 print( '\\n\\nWARNING: forcing last", "def create_ND_scalar_field_parameter_multiNC(sz, nrOfI=1, nrOfC=1): \"\"\" Create vector field torch Parameter", "if self.net_sched == 'm_only': if debugging: self.net = nn.Conv2d(2, 2,", "Ab.shape[1]==2: dim = 1 elif Ab.shape[1]==6: dim = 2 elif", "=0.05): \"\"\"generate a smooth weight mask for the omt \"\"\"", "2.) / (2 * np.power(sig[2], 2.))) g = g /", "== torch.cuda.HalfTensor: return torch.clamp(v, min=(np.asscalar(np.finfo('float16').min))/reduction_factor, max=(np.asscalar(np.finfo('float16').max))/reduction_factor) else: raise ValueError('Unknown data", "image in BxCxXxYxZ format :param spacing: list with spacing information", "Ab[5] # a_12x+a_22y+b2 elif dim == 3: phiR[0, ...] =", "Ab[1] * phi[0, ...] + Ab[4] * phi[1, ...] 
+", "# elif dim == 3: # id = np.mgrid[0:sz[0], 0:sz[1],", ":param phi: map for the warping, size BxdimxXxYxZ :param spacing:", "centered identity map') return idnp # # def centered_min_normalized_identity_map(sz, spacing,", "to make sure that subsequent sums work (hence will be", "dim==2: tensor[:, :, -1,:] = tensor[:, :,-2,:] + tensor[:, :,-2,:]", "_get_low_res_size_from_size(sz, factor): \"\"\"Returns the corresponding low-res size from a (high-res)", "just the spatial dimensions, i.e., XxYxZ # :param spacing: list", "* self.mask input = self.prepare_data(m,new_s) x= input x = self.net(x)", "id = np.zeros([nrOfI, 3, sz[2], sz[3], sz[4]], dtype=dtype) else: raise", "net.apply(weights_init_xavier) elif init_type == 'kaiming': net.apply(weights_init_kaiming) elif init_type == 'orthogonal':", ":return: returns the warped image of size BxCxXxYxZ \"\"\" dim", "pl = [] for key in pd: pl.append(pd[key]) return pl", "identity transform) :return: \"\"\" dim = get_dim_of_affine_transform(Ab) if dim==1: Ab.zero_()", "transform) :return: \"\"\" dim = get_dim_of_affine_transform(Ab) if dim==1: Ab.zero_() Ab[0]=1.", "csz = np.array([dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0.,1e-7) def create_vector_parameter(nr_of_elements): \"\"\"Creates a vector", "= True else: tmp = Parameter(tmp) return tmp def create_ND_scalar_field_parameter_multiNC(sz,", "= len(sz) if dim==1: id = np.mgrid[0:sz[0]] elif dim==2: id", "* (np.array(sz[2::]) - 1) / (np.array(lowResSize[2::]) - 1) ########################################## Adaptive", "dimensions corresponding to an affine transformation of the form y=Ax+b", "len(mu) if dim == 1: g = np.exp(-np.power(X[0, :] -", "id = np.zeros([nrOfI, 2, sz[2], sz[3]], dtype=dtype) elif dim ==", "be smaller than it could be, # but values of", "is None) or (factor >= 1): print('WARNING: Could not compute", "spline_order=1, zero_boundary=False, identity_map=None): \"\"\" :param I: B C X Y", "!= len(sz)-2: raise ValueError('Incompatible number of 
affine transforms') phiR =", "= [1,1], [1,1] >>> X = [0,0] >>> print(compute_normalized_gaussian(X, mu,", "(shifted so it is centered around 0) :param sz: size", "element has a dictionary with keys 'name' and 'model_params' for", "+ Ab[11] else: raise ValueError('Only supports dimensions 1, 2, and", "id = np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]] # else: # raise ValueError('Only", "2) updated_param = torch.zeros_like(Ab) for n in range(Ab.shape[0]): tm_param =", "come from the optimizer # (i.e., list and each list", "apply_affine_transform_to_map_multiNC(Ab,phi): \"\"\"Applies an affine transform to maps (for arbitrary batch", "Ab[6] * phi[2, ...] + Ab[9] phiR[1, ...] = Ab[1]", "np.array(factor)))).astype('int16') if lowResSize[-1] % 2 != 0: lowResSize[-1] -= 1", "mode='fan_in') elif classname.find('Linear') != -1: init.kaiming_normal(m.weight.data, a=0, mode='fan_in') elif classname.find('BatchNorm2d')", "from image spacing. :param spacing: image spacing :param sz: size", "input to an optimizer. :param pd: parameter dictionary :return: list", "if dim ==2: v = v.permute(0,2,3,1) if dim ==3: v", "I = torch.Tensor(I) is_numpy = True sz = np.array(list(I.size())) #", "9 are supported') if spline_order == 0: # return get_warped_label_map(I0,phi,spacing)", "if classname.find('Conv') != -1: init.xavier_normal(m.weight.data, gain=1) elif classname.find('Linear') != -1:", "= g / g.sum() return g else: raise ValueError('Can only", "-1,:] = tensor[:, :,-2,:] + tensor[:, :,-2,:] - tensor[:, :,-3,:]", "= spacing * ((sz[2::].astype('float') - 1.) / ( desiredSizeNC[2::].astype('float') -", "(2 * np.power(sig[1], 2.)) -np.power(X[2,:, :, :] - mu[2], 2.)", "spline_order,zero_boundary,use_01_input=use_01_input) else: raise ValueError('Images can only be warped in dimensions", "if pow ==3: mask = mask*mask*mask return mask # def", "we can do the symlink os.symlink(abs_s,abs_t_with_right_ext) def combine_dict(d1,d2): \"\"\"Creates a", ":, :] - mu[2], 2.) 
/ (2 * np.power(sig[2], 2.)))", "dummy dimensions if necessary). :param A: numpy array :param dim:", "in range(Ab.shape[0]): tm_param = torch.matmul(Cd[n,:,:dim],Ab[n,:,:dim]) updated_param[n,:,:dim] = tm_param updated_param[n,:,dim] =", "vector \"\"\" return Parameter(MyTensor(nr_of_elements).normal_(0., 1e-7)) def create_ND_vector_field_parameter_multiN(sz, nrOfI=1,get_field_from_external_network=False): \"\"\"Create vector", "range(dim): id[d] *= spacing[d] if sz[d]%2==0: #even id[d] -= spacing[d]*(sz[d]//2)", "np from . import finite_differences as fd import torch.nn as", "identity map') for n in range(nrOfI): id[n,...] = identity_map(sz[2::],spacing,dtype=dtype) return", "- tensor[:, :, :,-3] if dim==3: tensor[:, :,:, -1,:, :]", ":param identity_map: :return: \"\"\" if spacing is None: img_sz =", "* np.power(sig[2], 2.))) g = g / g.sum() return g", "phi[1, ...] + Ab[6] * phi[2, ...] + Ab[9] phiR[1,", "= create_ND_vector_field_variable_multiN(sz, nrOfI) if dim == 1: m[:, 0, :]", "= int(sz[0]) if dim == 1: id = np.zeros([nrOfI,1,sz[2]],dtype=dtype) elif", "I: image, batchXxYxZ :param sz: size of image :param spacing:", "2: return _compute_warped_image_multiNC_2d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif dim == 3:", "arbitrary batch size). :param Ab: affine transform parameter column vectors", "\"\"\"Applies an affine transform to a map. 
:param Ab: affine", "spacing_ratio[0] # idnp[1, :, :, :] = id[1] * spacing_ratio[1]", "= key return pl, par_to_name_dict def remove_infs_from_variable(v): # 32 -", "spacing, spline_order,zero_boundary=False,use_01_input=True): if spline_order not in [0, 1, 2, 3,", "elif sched == 'difference': input = moving-target return input def", "(optional) + relu def __init__(self, in_channels, out_channels, kernel_size, stride=1, active_unit='relu',", "3.') def set_affine_transform_to_identity(Ab): \"\"\"Sets the affine transformation as given by", "omt_power = param['forward_model']['smoother']['omt_power'] # omt_weight_penalty = param['forward_model']['smoother']['omt_weight_penalty'] # min_std =", "XxYxZ # :param spacing: list with spacing information [sx,sy,sz] #", "desiredSize[2:] is_numpy = False if not isinstance(I, torch.Tensor): I =", "idnp = np.zeros([1, sz[0]], dtype=dtype) # idnp[0, :] = id[0]", "...] + Ab[8] * phi[2, ...] + Ab[11] else: raise", "be <1) :return: low res size \"\"\" if (factor is", "max=(np.asscalar(np.finfo('float16').max))/reduction_factor) else: raise ValueError('Unknown data type: ' + str( type(v.data)))", "spacing after downsampling \"\"\" desiredSize = desiredSize[2:] is_numpy = False", "id[n,...] = identity_map(sz[2::],spacing,dtype=dtype) return id def centered_identity_map(sz, spacing, dtype='float32'): \"\"\"", "dim = len(sz) # if dim == 1: # id", "= 2 elif Ab.shape[1] == 12: dim = 3 if", "in practice anyway sz = v.size() reduction_factor = np.prod(np.array(sz)) condition", "+ Ab[2] * phi[1, ...] 
+ Ab[4] # a_11x+a_21y+b1 phiR[1,", "# inputs should be a dictionary could contain ['s'],['t'] super(AdpSmoother,", "in_channels, out_channels, kernel_size, stride=1, active_unit='relu', same_padding=False, bn=False, reverse=False, bias=False): super(ConvBnRel,", "identity map') # now get it into range [0,(sz-1)*spacing]^d id", "# min_spacing = np.min(spacing) # spacing_ratio =spacing/min_spacing # dim =", "also returns a dictionary which keeps track of the keys", ":, -2, :] - tensor[:, :, -3, :] tensor[:, :,:,", "to an affine transformation of the form y=Ax+b stored in", "map') # now get it into range [0,(sz-1)*spacing]^d id =", "mermaid.image_sampling as IS sampler = IS.ResampleImage() low_res_image, _ = sampler.downsample_image_to_size(I,", "AdaptVal(weights) if get_preweight_from_network: tmp.requires_grad = True else: tmp = Parameter(tmp)", "assert assert abs(torch.sum(warped_label_map.data -warped_label_map.data.round()))< 0.1, \"nn interpolation is not precise\"", "idnp[0,:, :, :] = id[0] idnp[1,:, :, :] = id[1]", "else: return A.reshape([1]*(dim-current_dim)+list(A.shape)) def get_dim_of_affine_transform(Ab): \"\"\"Returns the number of dimensions", "spacing is None: img_sz = I.shape[2:] spacing = 1. /", "'m_d_s': if debugging: self.net = nn.Conv2d(self.dim+1, self.dim, kernel_size, 1, padding=padding_size,", "!= -1: init.uniform(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data,", ":, -3] tensor[:, :,:, :, :, -1] = tensor[:, :,", "make sure it is a numpy array csz = np.array([nr_of_images,", "\"\"\" dim = get_dim_of_affine_transform(Ab) if dim==1: Ab.zero_() Ab[0]=1. 
elif dim==2:", "Gaussian evaluated at coordinates in X Example:: >>> mu, sig", "+ tensor[:, :,-2,:] - tensor[:, :,-3,:] tensor[:, :, :,-1] =", "1 elif Ab.shape[1]==6: dim = 2 elif Ab.shape[1]==12: dim =", ":, -2] - tensor[:, :, :, -3] def get_resampled_image(I, spacing,", "np.array([nrOfI, nrOfC] + list(desiredSize)) newspacing = spacing * ((sz[2::].astype('float') -", "def forward(self, x): x = self.conv(x) if self.bn is not", "= np.zeros([nrOfI, 2, sz[2], sz[3]], dtype=dtype) elif dim == 3:", "tensor[:, :,:, -1,:, :] = tensor[:, :, -2, :] +", "contains key value pairs for model parameters and converts it", "def get_parameter_list_and_par_to_name_dict_from_parameter_dict(pd): \"\"\"Same as get_parameter_list_from_parameter_dict; but also returns a dictionary", "and now store it in a dim+1 array if dim", "[0,(sz-1)*spacing]^d # id = np.array(id.astype(dtype)) # if dim == 1:", "currently supported for the identity map') # now get it", "2: id = np.zeros([nrOfI, 2, sz[2], sz[3]], dtype=dtype) elif dim", "+ Ab[6] * phi[2, ...] + Ab[9] phiR[1, ...] =", "min_spacing = np.min(spacing) # spacing_ratio = spacing/min_spacing # # #", "input = torch.cat((moving, target), dim=3) elif sched == 'list_concat': input", "== 1: id = np.mgrid[0:sz[0]] elif dim == 2: id", ":param nrOfC: number of channels :return: returns vector field of", "dimensions) Iw = compute_warped_image_multiNC(I0.view(torch.Size([1, 1] + list(I0.size()))), phi.view(torch.Size([1] + list(phi.size()))),", "spacing[d]*(sz[d]//2) else: #odd id[d] -= spacing[d]*((sz[d]+1)//2) # and now store", "affine transformation as given by the column vector Ab to", "...] = Ab[0] * phi[0, ...] 
+ Ab[3] * phi[1,", "id[1] * spacing_ratio[1] # idnp[2, :, :, :] = id[2]", "not precise\" else: raise ValueError(\" the label warping method is", "numpy array \"\"\" dim = len(v.shape)-2 if dim ==2: v", "(factor >= 1): print('WARNING: Could not compute low_res_size as factor", "finite_differences as fd import torch.nn as nn import torch.nn.init as", "init.xavier_normal(m.weight.data, gain=1) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data,", "organize_data(input, self.t, sched='depth_concat') elif self.net_sched == 'm_f_s_t': input = organize_data(m,", "is the same nrOfI = sz[0] nrOfC = sz[1] desiredSizeNC", "== 2: id = np.mgrid[0:sz[0], 0:sz[1]] elif dim == 3:", "\"\"\" dim =0 if Ab.shape[1] == 2: dim = 1", "/ (2 * np.power(sig[0], 2.)) -np.power(X[1,:, :, :] - mu[1],", "be overwritten with the identity transform) :return: \"\"\" dim =", "= phi.size() dim = len(sz) - 1 if dim not", "def get_net_sched(self, debugging=True, using_bn=True, active_unit='relu', using_sigmoid=False , kernel_size=5): # return", "Parameter of given size :param sz: just the spatial sizes", "vector field torch Variable of given size :param sz: just", "weights_init_kaiming(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv') != -1:", "# todo: check that this is the correct way of", "by the column vector Ab to the identity transform. :param", "init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_xavier(m): classname = m.__class__.__name__", "odd # Otherwise shifts everything by 0.5*spacing # # :param", "= [1]+[dim] +[1]*dim # spacing_ratio_t = spacing_ratio_t.view(*sp_sz) # new_var_list =", "max=(np.asscalar(np.finfo('float32').max))/reduction_factor) elif v.data.dtype == torch.DoubleTensor or type(v.data) == torch.cuda.DoubleTensor: return", "'\\n\\nWARNING: forcing last dimension to be even: fix properly in", "overwritten with identity trans. 
:return: \"\"\" sz = Ab.size() nr_of_images", "and add the results m = m + compute_vector_momentum_from_scalar_momentum_multiN(lam[:, c,", "should the spacing be here? Needed for new identity map", "print(compute_normalized_gaussian(X, mu, sig) \"\"\" dim = len(mu) if dim ==", "factor was ' + str(factor)) return np.array(sz) else: low_res_sz =", "#self.net.register_backward_hook(bh) def get_net_sched(self, debugging=True, using_bn=True, active_unit='relu', using_sigmoid=False , kernel_size=5): #", "if self.bn is not None: x = self.bn(x) if self.active_unit", "dim == 3: # idnp = np.zeros([3, sz[0], sz[1], sz[2]],", "import Variable from .libraries.modules.stn_nd import STN_ND_BCXYZ from .data_wrapper import AdaptVal", "What should the spacing be here? Needed for new identity", "nn.Sequential(*net) elif self.net_sched == 'm_f_s_t': if debugging: self.net = nn.Conv2d(self.dim+2,", ":] = id[0] * spacing_ratio[0] # idnp[1, :, :, :]", "for key in pd: pl.append(pd[key]) return pl def get_parameter_list_and_par_to_name_dict_from_parameter_dict(pd): \"\"\"Same", "dim == 1: g = np.exp(-np.power(X[0, :] - mu[0], 2.)/(2*np.power(sig[0],", "= m elif self.net_sched == 'm_f_s': input = organize_data(m,self.s,sched='depth_concat') elif", "# (i.e., list and each list element has a dictionary", "\"\"\"generate a smooth weight mask for the omt \"\"\" dim", "Create a centered identity map (shifted so it is centered", "space normalize for the net kernel :param tensor: :param mean:", "input elements are NaNs. 
:param x: numpy array :return: True", "spline_order) I1_warped = stn(I0, phi) return I1_warped def _compute_warped_image_multiNC_3d(I0, phi,", "...], I[:, c, ...], nrOfI, sz[2::], spacing) return m def", "= np.mgrid[0:sz[0],0:sz[1]] elif dim==3: id = np.mgrid[0:sz[0],0:sz[1],0:sz[2]] else: raise ValueError('Only", "stds) tensors[n,c] = torch.from_numpy(g) def weights_init_uniform(m): classname = m.__class__.__name__ #", "this # this is to make sure that subsequent sums", "dtype='float32'): \"\"\" Returns a centered identity map (with 0 in", "[0,(sz-1)*spacing]^d id = np.array( id.astype(dtype) ) if dim==1: id =", "as constants. self.bn = nn.BatchNorm2d(out_channels, eps=0.0001, momentum=0, affine=True) if bn", "map :param sz: size of an image in BxCxXxYxZ format", "def __time_warped_function(input=None): start = torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True) start.record() output", "desired dimension of view :return: returns view of A of", ":param lam: scalar momentum, batchxXxYxZ :param I: image, batchXxYxZ :param", "dimensions 1, 2, and 3.') Ab = Ab.view(Ab.shape[0], dim+1, dim).transpose(1,", "spline_order,zero_boundary,use_01_input=use_01_input) elif dim == 3: return _compute_warped_image_multiNC_3d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input)", "information [sx,sy,sz] :param dtype: numpy data-type ('float32', 'float64', ...) :return:", "classname = m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: init.kaiming_normal(m.weight.data,", "that subsequent sums work (hence will be smaller than it", "the identity map') # # min_spacing = np.min(spacing) # spacing_ratio", "= m * self.mask input = self.prepare_data(m,new_s) x= input x", "of low res parameterization \"\"\" #todo: check that this is", "numpy array :param dim: desired dimension of view :return: returns", "import numpy as np from . 
import finite_differences as fd", "should be add assert assert abs(torch.sum(warped_label_map.data -warped_label_map.data.round()))< 0.1, \"nn interpolation", "Ab[0]=1. elif dim==2: Ab.zero_() Ab[0]=1. Ab[3]=1. elif dim==3: Ab.zero_() Ab[0]=1.", "= Cd.view(Cd.shape[0], dim+1, dim).transpose(1, 2) updated_param = torch.zeros_like(Ab) for n", "nr_of_elements: number of vector elements :return: returns the parameter vector", "of size nrOfIxnrOfCxXxYxZ \"\"\" csz = np.array(sz) # just to", "everything by 0.5*spacing :param sz: just the spatial dimensions, i.e.,", "(parameter_list, name_dictionary) \"\"\" par_to_name_dict = dict() pl = [] for", "map. :param Ab: affine transform parameter column vector :param phi:", "the default if sched =='w_K_w': gaussian_std_weights = [torch.sqrt(std_w) for std_w", "parameter column vectors (batch size x param. vector) :param phi:", "0:sz[1]] # elif dim == 3: # id = np.mgrid[0:sz[0],", "3: phiR[0, ...] = Ab[0] * phi[0, ...] + Ab[3]", ":, :, -3] tensor[:, :,:, :, :, -1] = tensor[:,", "not isinstance(factor, list): lowResSize[2::] = (np.ceil((np.array(sz[2:]) * factor))).astype('int16') else: lowResSize[2::]", "apply_affine_transform_to_map(Ab[nrI, :], phi[nrI, ...]) return phiR def compute_normalized_gaussian(X, mu, sig):", "dtype='float32'): \"\"\" Create a centered identity map (shifted so it", "dtype: numpy data-type ('float32', 'float64', ...) # :return: returns the", "v.size == 1: return float(v) def checkNan(x): \"\"\"\" input should", "m * self.mask input = self.prepare_data(m,new_s) x= input x =", "the correct way of doing it return spacing * (np.array(sz[2::])-1)", "fdt.dXc(I)*lam m[:, 1, :, :, :] = fdt.dYc(I)*lam m[:, 2,", "are currently supported for the identity map') # # min_spacing", "-1: space_normal(m.weight.data) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data,", "dictionary which has entries from both of them. 
:param d1:", "Iw = compute_warped_image_multiNC(I0.view(torch.Size([1, 1] + list(I0.size()))), phi.view(torch.Size([1] + list(phi.size()))), spacing,", "img_sz = I.shape[2:] spacing = 1. / (np.array(img_sz) - 1)", "tensor[:, :, :, -3] tensor[:, :,:, :, :, -1] =", "doing it return spacing * (np.array(sz[2::]) - 1) / (np.array(lowResSize[2::])", "low_res_sz[2::] = (np.ceil((np.array(sz[2::]) * factor))).astype('int16') return low_res_sz def _compute_low_res_image(I, spacing,", "nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding=padding, bias=bias) else: self.conv = nn.ConvTranspose2d(in_channels,", "return updated_param def apply_affine_transform_to_map(Ab,phi): \"\"\"Applies an affine transform to a", "torch.autograd import Variable from .libraries.modules.stn_nd import STN_ND_BCXYZ from .data_wrapper import", "= Ab[1] * phi[0, ...] + Ab[3] * phi[1, ...]", "__time_warped_function def interoplate_boundary_right(tensor): dim = len(tensor.shape)-2 if dim==1: tensor[:,:,-1]= tensor[:,:-2]+", "d in range(dim): id[d] *= spacing[d] if sz[d]%2==0: #even id[d]", "based on memory id. :param pd: parameter dictionary :return: tuple", "it is a numpy array csz = np.array([nrOfI,nrOfC]+list(csz)) return Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7))", "np.zeros([nrOfI,1,sz[2]],dtype=dtype) elif dim == 2: id = np.zeros([nrOfI,2,sz[2],sz[3]],dtype=dtype) elif dim", "if classname.find('Conv') != -1: init.normal(m.weight.data) elif classname.find('Linear') != -1: init.normal(m.weight.data)", "g = np.exp(-np.power(X[0,:, :, :] - mu[0], 2.) 
/ (2", "id[0] * spacing_ratio[0] # idnp[1, :, :, :] = id[1]", "\"\"\"Set the affine transforms to the identity (in the case", "0) :param sz: size of an image in BxCxXxYxZ format", "as a numpy array on the cpu :param v: torch", "dtype=dtype) idnp[0,:] = id[0] elif dim==2: idnp = np.zeros([2, sz[0],", "be list of Variable \"\"\" return [len(np.argwhere(np.isnan(elem.detach().cpu().numpy()))) for elem in", "to make sure it is a numpy array csz =", "to evaluate :param mu: array indicating the mean :param sig:", "array csz = np.array([dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0.,1e-7) def create_vector_parameter(nr_of_elements): \"\"\"Creates a", "dim not in [1,2,3]: raise ValueError('Only supports dimensions 1, 2,", "= np.mgrid[0:sz[0]] elif dim==2: id = np.mgrid[0:sz[0],0:sz[1]] elif dim==3: id", "- tensor[:, :, :, -3] tensor[:, :,:, :, :, -1]", "def __init__(self, inputs, dim, net_sched=None): # settings should include [using_bias,", "sz[0], sz[1]], dtype=dtype) # idnp[0, :, :] = id[0] *", ":] = id[1] idnp[2,:, :, :] = id[2] else: raise", "idnp[1, :, :] = id[1] elif dim == 3: idnp", "self.net = nn.Conv2d(self.dim+1, self.dim, kernel_size, 1, padding=padding_size, bias=False) else: net", "after downsampling \"\"\" desiredSize = desiredSize[2:] is_numpy = False if", "raise ValueError('Can only convert scalar to vector momentum in dimensions", "as init from . import module_parameters as pars from .spline_interpolation", "c, ...], nrOfI, sz[2::], spacing) return m def compute_vector_momentum_from_scalar_momentum_multiN(lam, I,", "id = np.array(id.astype(dtype)) # if dim == 1: # id", "spacing = 1. 
/ (np.array(img_sz) - 1) if identity_map is", "== torch.cuda.FloatTensor or v.data.dtype==torch.float32: return torch.clamp(v, min=(np.asscalar(np.finfo('float32').min))/reduction_factor, max=(np.asscalar(np.finfo('float32').max))/reduction_factor) elif v.data.dtype", "that the second dimension here is image dim, not nrOfC", "active_unit='relu', using_sigmoid=False , kernel_size=5): # return the self.net and self.net_input", "doing it return spacing * (np.array(sz[2::])-1) / (np.array(lowResSize[2::])-1) def _get_low_res_size_from_size(sz,", ":return: returns vector field of size nrOfIxnrOfCxXxYxZ \"\"\" csz =", "2, sz[2], sz[3]], dtype=dtype) elif dim == 3: id =", ".data_wrapper import USE_CUDA import numpy as np from . import", "a (high-res) sz :param sz: size (high-res) :param factor: low-res", ".data_wrapper import AdaptVal from .data_wrapper import MyTensor from . import", ":, :, :] = id[2] * spacing_ratio[2] # else: #", "'rd_normal': net.apply(weights_init_rd_normal) elif init_type == 'normal': net.apply(weights_init_normal) elif init_type ==", "Ab to the identity transform. :param Ab: Affine parameter vector", "== 3: phiR[0, ...] = Ab[0] * phi[0, ...] 
+", "the warping, size BxdimxXxYxZ :param spacing: image spacing [dx,dy,dz] :return:", "fdt.dXc(I)*lam elif dim == 2: m[:, 0, :, :] =", ":,-3] if dim==3: tensor[:, :,:, -1,:, :] = tensor[:, :,", ":, -2] - tensor[:, :, :, -3] tensor[:, :,:, :,", "remove, currently fix for symmetric training if I.shape[0] != identity_map.shape[0]:", "if not isinstance(factor, list): lowResSize[2::] = (np.ceil((np.array(sz[2:]) * factor))).astype('int16') else:", "stds = std * np.ones(dim) print('WARNING: What should the spacing", "for the net kernel :param tensor: :param mean: :param std:", "os.path.abspath(sf) ext_s = os.path.splitext(abs_s)[1] abs_t = os.path.abspath(tf) root_t,ext_t = os.path.splitext(abs_t)", "I[:, c, ...], nrOfI, sz[2::], spacing) return m def compute_vector_momentum_from_scalar_momentum_multiN(lam,", "tensor[:, :,-3,:] tensor[:, :, :,-1] = tensor[:, :, :,-2] +", "map for resampling ID = compute_warped_image_multiNC(I, idDes, newspacing, spline_order, zero_boundary)", "1, padding=padding_size, bias=False) else: net = \\ [ConvBnRel(self.dim + 1,", "size). :param Ab: Parameter vectors B x pars (batch size", "2, and 3.') def set_affine_transform_to_identity(Ab): \"\"\"Sets the affine transformation as", "image :param spacing: spacing of image :return: returns the vector", "dimension dimxXxYxZ \"\"\" dim = len(sz) if dim == 1:", "float(v) def checkNan(x): \"\"\"\" input should be list of Variable", "\"\"\"Creates a view of A of dimension dim (by adding", "# a_12x+a_22y+b2 elif dim == 3: phiR[0, ...] = Ab[0]", "\"\"\" #todo: check that this is the correct way of", "torch Variable of given size. 
:param sz: just the spatial", "= f(input) end.record() # Waits for everything to finish running", "> dim: raise ValueError('Can only add dimensions, but not remove", "returns the warped image of size XxYxZ \"\"\" # implements", "newspacing, spline_order, zero_boundary) return ID if not is_numpy else ID.numpy(),", "kernel_size, stride=1, active_unit='relu', same_padding=False, bn=False, reverse=False, bias=False): super(ConvBnRel, self).__init__() padding", "desired size :param I: Input image (expected to be of", "pl, par_to_name_dict def remove_infs_from_variable(v): # 32 - bit floating point:", "= v.permute(0,2,3,4,1) return v def get_scalar(v): if isinstance(v, float): return", "return I1_warped def compute_warped_image(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True): \"\"\"Warps", "spacing[d] if sz[d]%2==0: #even id[d] -= spacing[d]*(sz[d]//2) else: #odd id[d]", "classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_rd_normal(m):", "an affine transform to a map. 
:param Ab: affine transform", "= fdt.dXc(I)*lam m[:, 1, :, :] = fdt.dYc(I)*lam elif dim", "tmp = MyTensor(*(csz.tolist())).normal_(0.,1e-7) tmp.requires_grad = True else: tmp = Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7))", "Variable \"\"\" return [len(np.argwhere(np.isnan(elem.detach().cpu().numpy()))) for elem in x] def noramlized_spacing_to_smallest(spacing):", "idnp[1, :, :] = id[1] * spacing_ratio[1] # elif dim", "id = np.mgrid[0:sz[0],0:sz[1],0:sz[2]] else: raise ValueError('Only dimensions 1-3 are currently", "0.02) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0)", "range(nr_of_images): set_affine_transform_to_identity(Ab[nrI, :]) def get_inverse_affine_param(Ab): \"\"\"Computes inverse of affine transformation.", "dim+1 array if dim == 1: idnp = np.zeros([1, sz[0]],", "np.mgrid[0:sz[0],0:sz[1]] elif dim==3: id = np.mgrid[0:sz[0],0:sz[1],0:sz[2]] else: raise ValueError('Only dimensions", "# check that the batch size and the number of", "torch.abs(torch.log(max_std/stds))**omt_power # omt_const = omt_const/(torch.abs(torch.log(max_std / min_std)) ** omt_power) #", "if pow ==2: mask = mask**2 if pow ==3: mask", "(np.array(lowResSize[2::])-1) def _get_low_res_size_from_size(sz, factor): \"\"\"Returns the corresponding low-res size from", "if debugging: self.net = nn.Conv2d(2, 2, kernel_size, 1, padding=padding_size, bias=False,groups=2)", "pars from .spline_interpolation import SplineInterpolation_ND_BCXYZ import os try: from .libraries.functions.nn_interpolation", "converts it into a list of parameters that can be", "os.path.splitext(abs_t) abs_t_with_right_ext = root_t + ext_s if os.path.isfile(abs_t_with_right_ext): if os.path.samefile(abs_s,abs_t_with_right_ext):", "/ (2 * np.power(sig[1], 2.))) g = g/g.sum() return g", "shifts everything by 0.5*spacing # # :param sz: just the", "x] def noramlized_spacing_to_smallest(spacing): min_sp = np.min(spacing) spacing[spacing>min_sp]=min_sp 
return spacing def", "Ab.shape[1]==12: dim = 3 if dim not in [1, 2,", ":param spacing: array describing the spatial spacing :param desiredSize: array", "with spacing information [sx,sy,sz] :param dtype: numpy data-type ('float32', 'float64',", "mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1 elif dim==3: mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range ]=1 sm = get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask =", "torch.max(stds) # omt_const = torch.abs(torch.log(max_std/stds))**omt_power # omt_const = omt_const/(torch.abs(torch.log(max_std /", "column vector. For A =[a1,a2,a3], the parameter vector is simply", "spacing information [sx,sy,sz] # :param dtype: numpy data-type ('float32', 'float64',", "os try: from .libraries.functions.nn_interpolation import get_nn_interpolation except ImportError: print('WARNING: nn_interpolation", "dim == 1: id = np.mgrid[0:sz[0]] elif dim == 2:", "==3: v = v.permute(0,2,3,4,1) return v def get_scalar(v): if isinstance(v,", "= x; C = inv(A), d = -Cb :param Ab:", "image :return: returns the vector momentum \"\"\" fdt = fd.FD_torch(spacing)", "the low-res parameterization from image spacing :param spacing: image spacing", "[5,10,10] in 3D) :param nrOfI: number of images :return: returns", "X Example:: >>> mu, sig = [1,1], [1,1] >>> X", "1-3 are currently supported for the centered identity map') return", "np.array([dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0.,1e-7) def create_vector_parameter(nr_of_elements): \"\"\"Creates a vector parameters with", ":] = tensor[:, :, -2, :] + tensor[:, :, -2,", "np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]] # else: # raise ValueError('Only dimensions 1-3", "nr = len(Ab) if nr==2: return 1 elif nr==6: return", "way of handling this # this is to make sure", "ind_pars is not a dictionary assume that they come from", "= len(img_sz) mask_sz = [1,1]+ list(img_sz) mask = AdaptVal(torch.zeros(*mask_sz)) if", "spatial sizes (e.g., [5] in 1D, [5,10] in 
2D, [5,10,10]", "B C X Y Z :param spline_order: :param zero_boundary: :param", "= updated_param.transpose(1,2).contiguous().view(Ab.shape[0],-1) return updated_param def apply_affine_transform_to_map(Ab,phi): \"\"\"Applies an affine transform", "phi.view(torch.Size([1] + list(phi.size()))), spacing, spline_order, zero_boundary, use_01_input) return Iw.view(I0.size()) def", "m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: init.xavier_normal(m.weight.data, gain=1) elif", "numpy array on the cpu :param v: torch array :return:", "reverse=False, bias=False): super(ConvBnRel, self).__init__() padding = int((kernel_size - 1) //", "5, active_unit=active_unit, same_padding=True, bn=using_bn)] if using_sigmoid: net += [nn.Sigmoid()] self.net", "vector) :return: Updated affine parameters \"\"\" dim = 0 if", "is not None: idDes = identity_map else: idDes = AdaptVal(torch.from_numpy(identity_map_multiN(desiredSizeNC,", "spacing: list with spacing information [sx,sy,sz] # :param dtype: numpy", "sz = Ab.size() nr_of_images = sz[0] for nrI in range(nr_of_images):", "vector :return: dimensionality of transform (1,2,or 3) \"\"\" nr =", "the spatial dimensions, i.e., XxYxZ # :param spacing: list with", "# return new_var_list # def identity_map(sz,spacing,dtype='float32'): \"\"\" Returns an identity", "list of parameters \"\"\" pl = [] for key in", "affine transformation of the form y=Ax+b stored in a column", "centered around 0) :param sz: size of an image in", ":return: returns the identity map of dimension dimxXxYxZ # \"\"\"", "g = g / g.sum() return g else: raise ValueError('Can", "from image spacing :param spacing: image spacing :param sz: size", "label warping method is not implemented\") return warped_label_map def t2np(v):", "len(sz) if dim == 1: id = np.mgrid[0:sz[0]] elif dim", "of given size :param sz: just the spatial sizes (e.g.,", "transform (1,2,or 3) \"\"\" nr = len(Ab) if nr==2: return", "- mu[1], 2.) 
/ (2 * np.power(sig[1], 2.))) g =", "= torch.max(stds) # omt_const = torch.abs(torch.log(max_std/stds))**omt_power # omt_const = omt_const/(torch.abs(torch.log(max_std", "# print(classname) if classname.find('Conv') != -1: space_normal(m.weight.data) elif classname.find('Linear') !=", "dictionary which contains key value pairs for model parameters and", "map code') raise ValueError('Double check the spacing here before running", "0:sz[2]] else: raise ValueError('Only dimensions 1-3 are currently supported for", "gi[0], gi[1], gi[2] class ConvBnRel(nn.Module): # conv + bn (optional)", "+Cd[n,:,dim] updated_param = updated_param.transpose(1,2).contiguous().view(Ab.shape[0],-1) return updated_param def apply_affine_transform_to_map(Ab,phi): \"\"\"Applies an", "fd.FD_torch(spacing) dim = len(sz) m = create_ND_vector_field_variable_multiN(sz, nrOfI) if dim", "(nrC corresponds to dimension) :return: returns transformed maps \"\"\" sz", "the Fourier transform later!\\n\\n') return lowResSize def get_res_spacing_from_spacing(spacing, sz, lowResSize):", "idnp[0, :, :, :] = id[0] * spacing_ratio[0] # idnp[1,", "np.zeros([2, sz[0], sz[1]], dtype=dtype) idnp[0, :, :] = id[0] idnp[1,", "= nn.Sequential(*net) elif self.net_sched == 'm_f_s_t': if debugging: self.net =", "padding=padding_size, bias=False) else: net = \\ [ConvBnRel(self.dim + 2, 20,", "ValueError('Only supports dimensions 1, 2, and 3.') Ab = Ab.view(Ab.shape[0],", "3D) :return: returns vector field of size dimxXxYxZ \"\"\" dim", "torch.cuda.DoubleTensor: return torch.clamp(v, min=(np.asscalar(np.finfo('float64').min))/reduction_factor, max=(np.asscalar(np.finfo('float64').max))/reduction_factor) elif v.data.dtype == torch.HalfTensor or", "sz[2]], dtype=dtype) # idnp[0, :, :, :] = id[0] *", "A else: return A.reshape([1]*(dim-current_dim)+list(A.shape)) def get_dim_of_affine_transform(Ab): \"\"\"Returns the number of", "def recover_var_list_from_min_normalized_space(var_list,spacing,do_transform=True): # if 
do_transform: # min_spacing = np.min(spacing) #", "= np.zeros([3,sz[0], sz[1], sz[2]], dtype=dtype) idnp[0,:, :, :] = id[0]", "precise\" else: raise ValueError(\" the label warping method is not", "- 2 nrOfI = sz[0] if dim == 1: id", "phi[nrI, ...]) return phiR def compute_normalized_gaussian(X, mu, sig): \"\"\"Computes a", "0 to 9 are supported') if spline_order == 0: stn", "affine=False the output of BatchNorm is equivalent to considering gamma=1", "= nn.Linear(in_features, out_features) if active_unit == 'relu': self.active_unit = nn.ReLU(inplace=True)", "phi, spacing, sched='nn'): if sched == 'nn': warped_label_map = compute_warped_image_multiNC(label_map,", "the middle) if the sz is odd # Otherwise shifts", "Ab.size()[0] if nr_of_images != sz[0]: raise ValueError('Incompatible number of affine", ":param zero_boundary: :param identity_map: :return: \"\"\" if spacing is None:", "factor))).astype('int16') else: lowResSize[2::] = (np.ceil((np.array(sz[2:]) * np.array(factor)))).astype('int16') if lowResSize[-1] %", "image spacing. :param spacing: image spacing :param sz: size of", "spline_order) I1_warped = stn(I0, phi) return I1_warped def compute_warped_image(I0, phi,", "array :param dim: desired dimension of view :return: returns view", "n in range(nrOfI): id[n, ...] = centered_identity_map(sz[2::], spacing,dtype=dtype) return id", "= id[0] idnp[1, :, :, :] = id[1] idnp[2, :,", "dim+1 array if dim==1: idnp = np.zeros([1, sz[0]], dtype=dtype) idnp[0,:]", "it is a numpy array csz = np.array([dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0.,1e-7)", "dtype=dtype) # idnp[0, :, :, :] = id[0] * spacing_ratio[0]", "image, batchXxYxZ :param sz: size of image :param spacing: spacing", "to maps (for arbitrary batch size). 
:param Ab: affine transform", "nn.ELU(inplace=True) else: self.active_unit = None def forward(self, x): x =", "elif active_unit == 'elu': self.active_unit = nn.ELU(inplace=True) else: self.active_unit =", "= AdaptVal(torch.zeros(*mask_sz)) if dim ==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1 elif dim==3: mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range ]=1", "x = self.active_unit(x) return x class FcRel(nn.Module): # fc+ relu(option)", ":, :, :] = id[2] else: raise ValueError('Only dimensions 1-3", ":] = id[0] idnp[1, :, :] = id[1] elif dim", "# dim = spacing.size # spacing_ratio_t = AdaptVal(torch.Tensor(spacing_ratio)) # sp_sz", "the optimizer # (i.e., list and each list element has", "is not None else None for var in var_list] #", "identity map of dimension dimxXxYxZ \"\"\" dim = len(sz) if", "spacing be here? Needed for new identity map code') raise", "print(classname) if classname.find('Conv') != -1: init.kaiming_normal(m.weight.data, a=0, mode='fan_in') elif classname.find('Linear')", "X Y Z :param spacing: spx spy spz :param desiredSize:", "for elem in x] def noramlized_spacing_to_smallest(spacing): min_sp = np.min(spacing) spacing[spacing>min_sp]=min_sp", "for par in ind_pars: model_pars[par['name']] = par['model_params'] return model_pars def", ":return: \"\"\" if spacing is None: img_sz = I.shape[2:] spacing", "-np.power(X[2,:, :, :] - mu[2], 2.) 
/ (2 * np.power(sig[2],", "using_sigmoid: net += [nn.Sigmoid()] self.net = nn.Sequential(*net) def prepare_data(self, m,", "elem in x] def noramlized_spacing_to_smallest(spacing): min_sp = np.min(spacing) spacing[spacing>min_sp]=min_sp return", "return MyTensor(*(csz.tolist())).normal_(0., 1e-7) def create_ND_vector_field_variable(sz): \"\"\"Create vector field torch Variable", "'relu': self.active_unit = nn.ReLU(inplace=True) elif active_unit == 'elu': self.active_unit =", "= dim self.net_sched = 'm_only' self.s = inputs['s'].detach() self.t =", "the corresponding low-res size from a (high-res) sz :param sz:", "for everything to finish running torch.cuda.synchronize() print(start.elapsed_time(end)) return output return", "nrI in range(nr_of_images): set_affine_transform_to_identity(Ab[nrI, :]) def get_inverse_affine_param(Ab): \"\"\"Computes inverse of", ". import module_parameters as pars from .spline_interpolation import SplineInterpolation_ND_BCXYZ import", "= nn.Sequential(*net) def prepare_data(self, m, new_s): input=None if self.net_sched ==", "lowResSize[-1] % 2 != 0: lowResSize[-1] -= 1 print( '\\n\\nWARNING:", "an affine transform to maps (for arbitrary batch size). :param", "2.)) -np.power(X[2,:, :, :] - mu[2], 2.) / (2 *", "# but values of this size should not occur in", "is not None: x = self.bn(x) if self.active_unit is not", "def checkNan(x): \"\"\"\" input should be list of Variable \"\"\"", "from . 
import smoother_factory as sf from .data_wrapper import USE_CUDA", "== 2: dim = 1 elif Ab.shape[1] == 6: dim", "format batchxnrCxXxYxZ (nrC corresponds to dimension) :return: returns transformed maps", "7, 8, 9]: raise ValueError('Currently only orders 0 to 9", "8, 9]: raise ValueError('Currently only orders 0 to 9 are", "I0: image to warp, image size XxYxZ :param phi: map", "# dim = len(sz) # if dim == 1: #", "if isinstance(v, float): return v elif isinstance(v, np.ndarray) and v.size", "here should be add assert assert abs(torch.sum(warped_label_map.data -warped_label_map.data.round()))< 0.1, \"nn", "method [%s]' % init_type) if init_type == 'rd_normal': net.apply(weights_init_rd_normal) elif", "self.net_sched == 'm_f_s_t': input = organize_data(m, self.s, sched='depth_concat') input =", "== 'm_f_s_t': input = organize_data(m, self.s, sched='depth_concat') input = organize_data(input,", ":, :] - mu[1], 2.) / (2 * np.power(sig[1], 2.))", "Example:: >>> mu, sig = [1,1], [1,1] >>> X =", "2.))) g = g/g.sum() return g elif dim == 2:", "affine parameters. Formally: C(Ax+b)+d = CAx+Cb+d :param Ab: B x", "Ab.size() nr_of_images = sz[0] for nrI in range(nr_of_images): set_affine_transform_to_identity(Ab[nrI, :])", "= np.zeros([nrOfI,1,sz[2]],dtype=dtype) elif dim == 2: id = np.zeros([nrOfI,2,sz[2],sz[3]],dtype=dtype) elif", "-= spacing[d]*(sz[d]//2) # else: # #odd # id[d] -= spacing[d]*((sz[d]+1)//2)", "(np.ceil((np.array(sz[2:]) * factor))).astype('int16') else: lowResSize[2::] = (np.ceil((np.array(sz[2:]) * np.array(factor)))).astype('int16') if", "= nn.Conv2d(self.dim+1, self.dim, kernel_size, 1, padding=padding_size, bias=False) else: net =", "ConvBnRel(20, self.dim, 5, active_unit=active_unit, same_padding=True, bn=using_bn)] if using_sigmoid: net +=", "by the ratio # if dim == 1: # idnp", "self.active_unit(x) return x class AdpSmoother(nn.Module): \"\"\" a simple conv. 
implementation,", "c, ...], I[:, c, ...], nrOfI, sz[2::], spacing) return m", "if dim != len(sz)-2: raise ValueError('Incompatible number of affine transforms')", "warping method is not implemented\") return warped_label_map def t2np(v): \"\"\"", "net.apply(weights_init_uniform) elif init_type == 'xavier': net.apply(weights_init_xavier) elif init_type == 'kaiming':", "the symlink os.symlink(abs_s,abs_t_with_right_ext) def combine_dict(d1,d2): \"\"\"Creates a dictionary which has", "= var_list # return new_var_list # def recover_var_list_from_min_normalized_space(var_list,spacing,do_transform=True): # if", "!= -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_kaiming(m): classname", "csz = np.array([nrOfI, dim]+list(csz)) if get_field_from_external_network: tmp = MyTensor(*(csz.tolist())).normal_(0.,1e-7) tmp.requires_grad", "target, sched='depth_concat'): if sched == 'depth_concat': input = torch.cat([moving, target],", "mask_sz = [1,1]+ list(img_sz) mask = AdaptVal(torch.zeros(*mask_sz)) if dim ==2:", "...] + Ab[2] * phi[1, ...] + Ab[4] # a_11x+a_21y+b1", "= len(sz) if dim == 1: id = np.mgrid[0:sz[0]] elif", "C(Ax+b)+d = CAx+Cb+d :param Ab: B x pars (batch size", "elif dim == 2: # idnp = np.zeros([2, sz[0], sz[1]],", ":] = fdt.dZc(I)*lam else: raise ValueError('Can only convert scalar to", "1: stn = STN_ND_BCXYZ(spacing,zero_boundary, use_bilinear=True, use_01_input=use_01_input) else: stn = SplineInterpolation_ND_BCXYZ(spacing,", "transformed maps \"\"\" sz = phi.size() dim = get_dim_of_affine_transform(Ab[0,:]) nr_of_images", "= param['forward_model']['smoother']['omt_power'] # omt_weight_penalty = param['forward_model']['smoother']['omt_weight_penalty'] # min_std = torch.min(stds)", "return g else: raise ValueError('Can only compute Gaussians in dimensions", "given size. 
:param sz: just the spatial sizes (e.g., [5]", "= torch.abs(torch.log(max_std/stds))**omt_power # omt_const = omt_const/(torch.abs(torch.log(max_std / min_std)) ** omt_power)", "v elif isinstance(v, np.ndarray) and v.size == 1: return float(v)", "Ab[0]=1. Ab[4]=1. Ab[8]=1. else: raise ValueError('Only supports dimensions 1, 2,", "bit floating point: torch.DoubleTensor, torch.cuda.DoubleTensor # 16 - bit floating", "(for arbitrary batch size). :param Ab: affine transform parameter column", "size dimxXxYxZ :param spacing: image spacing [dx,dy,dz] :return: returns the", ":return: returns the identity map \"\"\" dim = len(sz) -", "print(classname) if classname.find('Conv') != -1: init.normal(m.weight.data) elif classname.find('Linear') != -1:", "== torch.HalfTensor or type(v.data) == torch.cuda.HalfTensor: return torch.clamp(v, min=(np.asscalar(np.finfo('float16').min))/reduction_factor, max=(np.asscalar(np.finfo('float16').max))/reduction_factor)", "* phi[1, ...] + Ab[4] # a_11x+a_21y+b1 phiR[1, ...] =", "g in range(nr_of_mg_weights): weights[:, g, ...] = gaussian_std_weights[g] tmp =", "def get_dim_of_affine_transform(Ab): \"\"\"Returns the number of dimensions corresponding to an", "Ab.view(Ab.shape[0], dim+1, dim).transpose(1,2) Ab_inv = torch.zeros_like(Ab) for n in range(Ab.shape[0]):", "ValueError('Only dimensions 1-3 are currently supported for the identity map')", "spacing_ratio =spacing/min_spacing # dim = spacing.size # spacing_ratio_t = AdaptVal(torch.Tensor(spacing_ratio))", "nn.Conv2d(self.dim+1, self.dim, kernel_size, 1, padding=padding_size, bias=False) else: net = \\", "field of size nrOfIxdimxXxYxZ \"\"\" nr_of_mg_weights = len(gaussian_std_weights) csz =", "phiR[1, ...] = Ab[1] * phi[0, ...] 
+ Ab[3] *", ":return: numpy array \"\"\" dim = len(v.shape)-2 if dim ==2:", "\"\"\" from __future__ import print_function from __future__ import absolute_import #", "v.permute(0,2,3,4,1) return v def get_scalar(v): if isinstance(v, float): return v", "dimensions 1, 2, and 3.') def set_affine_transform_to_identity_multiN(Ab): \"\"\"Set the affine", "dtype=dtype) # idnp[0, :, :] = id[0] * spacing_ratio[0] #", "# idnp[0, :] = id[0] * spacing_ratio[0] # elif dim", "todo: check that this is the correct way of doing", "x param. vector) :param phi: maps; format batchxnrCxXxYxZ (nrC corresponds", "# omt_const = omt_const/(torch.abs(torch.log(max_std / min_std)) ** omt_power) # omt_const", "dimensions 1-3 are currently supported for the centered identity map')", "= np.array( id.astype(dtype) ) if dim==1: id = id.reshape(1,sz[0]) #", "using_sigmoid=False , kernel_size=5): # return the self.net and self.net_input padding_size", "Parameter(MyTensor(nr_of_elements).normal_(0., 1e-7)) def create_ND_vector_field_parameter_multiN(sz, nrOfI=1,get_field_from_external_network=False): \"\"\"Create vector field torch Parameter", "zero_boundary=False, identity_map=None): \"\"\" Resample an image to a given desired", "to an optimizer. 
:param pd: parameter dictionary :return: list of", "sz[0] nrOfC = sz[1] desiredSizeNC = np.array([nrOfI, nrOfC] + list(desiredSize))", "desiredSize: array for the desired size (excluding B and C,", ":return: returns transformed maps \"\"\" sz = phi.size() dim =", "elif v.data.dtype == torch.DoubleTensor or type(v.data) == torch.cuda.DoubleTensor: return torch.clamp(v,", "transformation as given by the column vector Ab to the", "= moving-target return input def bh(m,gi,go): print(\"Grad Input\") print((torch.sum(gi[0].data), torch.sum(gi[1].data)))", "sz[1] for c in range(nrOfC): # loop over all the", "evaluated at coordinates in X Example:: >>> mu, sig =", "are currently supported for the centered identity map') # #", "= torch.cat((moving, target), dim=3) elif sched == 'list_concat': input =", "(batch size x param. vector) :param phi: maps; format batchxnrCxXxYxZ", "\\sqrt{Var[x] + \\epsilon}} * gamma + beta #When affine=False the", "returns view of A of appropriate dimension \"\"\" current_dim =", "dimxXxYxZ :param spacing: image spacing [dx,dy,dz] :return: returns the warped", "values of this size should not occur in practice anyway", "in 3D) :return: returns vector field of size dimxXxYxZ \"\"\"", "affine=True) if bn else None if active_unit == 'relu': self.active_unit", "prepare_data(self, m, new_s): input=None if self.net_sched == 'm_only': input =", "dim): \"\"\"Creates a view of A of dimension dim (by", "elif dim == 3: m[:, 0, :, :, :] =", "3, sz[2], sz[3], sz[4]], dtype=dtype) else: raise ValueError('Only dimensions 1-3", "sz: just the spatial dimensions, i.e., XxYxZ :param spacing: list", ":,-3,:] tensor[:, :, :,-1] = tensor[:, :, :,-2] + tensor[:,", "trans. 
:return: \"\"\" sz = Ab.size() nr_of_images = sz[0] for", "net = \\ [ConvBnRel(self.dim + 2, 20, 5, active_unit=active_unit, same_padding=True,", "ValueError('Can only add dimensions, but not remove them') if current_dim", "= torch.cat((moving.unsqueeze(0),target.unsqueeze(0)),dim=0) elif sched == 'difference': input = moving-target return", "np.array(sz) # just to make sure it is a numpy", "_compute_warped_image_multiNC_3d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) else: raise ValueError('Images can only be", "if debugging: self.net = nn.Conv2d(self.dim + 2, self.dim, kernel_size, 1,", "np.zeros([2, sz[0], sz[1]], dtype=dtype) idnp[0,:, :] = id[0] idnp[1,:, :]", "'m_only': input = m elif self.net_sched == 'm_f_s': input =", "sz[3]], dtype=dtype) elif dim == 3: id = np.zeros([nrOfI, 3,", "transforms to the identity (in the case of arbitrary batch", "nn.Sequential(*net) def prepare_data(self, m, new_s): input=None if self.net_sched == 'm_only':", "zero_boundary=False, use_01_input=True): if spline_order not in [0, 1, 2, 3,", "return np.array(sz) else: low_res_sz = np.array(sz) low_res_sz[2::] = (np.ceil((np.array(sz[2::]) *", "= MyTensor(*(csz.tolist())).normal_(0.,1e-7) tmp.requires_grad = True else: tmp = Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) return", "np.min(spacing) # spacing_ratio = spacing/min_spacing # # # # now", "identity transform. :param Ab: Affine parameter vector (will be overwritten", "Cd): \"\"\"Update affine parameters. Formally: C(Ax+b)+d = CAx+Cb+d :param Ab:", "\"\"\"Sets the affine transformation as given by the column vector", "g elif dim == 3: g = np.exp(-np.power(X[0,:, :, :]", "dim = len(sz) if dim == 1: id = np.mgrid[0:sz[0]]", "def update_affine_param(Ab, Cd): \"\"\"Update affine parameters. 
Formally: C(Ax+b)+d = CAx+Cb+d", "shifts everything by 0.5*spacing :param sz: just the spatial dimensions,", "dim == 1: id = np.zeros([nrOfI,1,sz[2]],dtype=dtype) elif dim == 2:", "dim ==3: v = v.permute(0,2,3,4,1) return v def get_scalar(v): if", "kernel_size, 1, padding=padding_size, bias=False,groups=2) else: net = \\ [ConvBnRel(self.dim, 20,", "package in a more meaningful way. \"\"\" from __future__ import", "map of dimension dimxXxYxZ # \"\"\" # dim = len(sz)", "return [len(np.argwhere(np.isnan(elem.detach().cpu().numpy()))) for elem in x] def noramlized_spacing_to_smallest(spacing): min_sp =", "torch.nn.parameter import Parameter from torch.autograd import Variable from .libraries.modules.stn_nd import", "sz[0] for nrI in range(nr_of_images): set_affine_transform_to_identity(Ab[nrI, :]) def get_inverse_affine_param(Ab): \"\"\"Computes", "create_ND_vector_field_variable(sz): \"\"\"Create vector field torch Variable of given size. :param", "phi[2, ...] + Ab[9] phiR[1, ...] = Ab[1] * phi[0,", "is not None: x = self.active_unit(x) return x class AdpSmoother(nn.Module):", ":,:, :, :, -1] = tensor[:, :, :, -2] +", ":param I: image, BxCxXxYxZ :param sz: size of image :param", "elif dim==3: mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range ]=1 sm = get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask = sm.smooth(mask)", "+= [nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched =='m_f_s': if debugging:", "Returns a centered identity map (with 0 in the middle)", "in range(nr_of_images): set_affine_transform_to_identity(Ab[nrI, :]) def get_inverse_affine_param(Ab): \"\"\"Computes inverse of affine", "in ind_pars: model_pars[par['name']] = par['model_params'] return model_pars def compute_vector_momentum_from_scalar_momentum_multiNC(lam, I,", "identity map :param sz: size of an image in BxCxXxYxZ", "id[0] idnp[1, :, :] = id[1] elif dim == 3:", "size x param. 
vector) :param phi: maps; format batchxnrCxXxYxZ (nrC", ":return: True if NaNs are present, False else \"\"\" return", "Ab[3] * phi[1, ...] + Ab[5] # a_12x+a_22y+b2 elif dim", "Needed for new identity map code') raise ValueError('Double check the", "are present, False else \"\"\" return (x != x).any() def", "I0: image to warp, image size BxCxXxYxZ :param phi: map", "[dx,dy,dz] :return: returns the warped image of size XxYxZ \"\"\"", "size of image :param spacing: spacing of image :return: returns", "= np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]] # else: # raise ValueError('Only dimensions", "cpu :param v: torch array :return: numpy array \"\"\" return", "None if active_unit == 'relu': self.active_unit = nn.ReLU(inplace=True) elif active_unit", "range(nrOfI): id[n, ...] = centered_identity_map(sz[2::], spacing,dtype=dtype) return id def identity_map_multiN(sz,spacing,dtype='float32'):", "compute_warped_image(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True): \"\"\"Warps image. 
:param I0:", "elif self.net_sched == 'm_f_s': input = organize_data(m,self.s,sched='depth_concat') elif self.net_sched ==", "could be, # but values of this size should not", "I.shape[0] != identity_map.shape[0]: n_batch = I.shape[0] desiredSize = desiredSize.copy() desiredSize[0]", "spacing_ratio_t = AdaptVal(torch.Tensor(spacing_ratio)) # sp_sz = [1]+[dim] +[1]*dim # spacing_ratio_t", "identity_map=None): \"\"\" Resample an image to a given desired size", "self.net = nn.Sequential(*net) elif self.net_sched == 'm_d_s_f_t': if debugging: self.net", "spacing,spline_order=0,zero_boundary=True) # check if here should be add assert assert", "the warped image of size XxYxZ \"\"\" # implements this", "dim]+list(csz)) return MyTensor(*(csz.tolist())).normal_(0., 1e-7) def create_ND_vector_field_variable(sz): \"\"\"Create vector field torch", "def compute_omt_const(stds,param,dim): # omt_power = param['forward_model']['smoother']['omt_power'] # omt_weight_penalty = param['forward_model']['smoother']['omt_weight_penalty']", "= np.prod(np.array(sz)) condition = True if type(v.data) == torch.cuda.FloatTensor or", "tensor[:, :, :, -2] - tensor[:, :, :, -3] def", ":param dtype: numpy data-type ('float32', 'float64', ...) 
# :return: returns", "# sp_sz = [1]+[dim] +[1]*dim # spacing_ratio_t = spacing_ratio_t.view(*sp_sz) #", "\"\"\" Returns the corresponding low-res size from a (high-res) sz", "the spatial sizes (e.g., [5] in 1D, [5,10] in 2D,", "isinstance(v, np.ndarray) and v.size == 1: return float(v) def checkNan(x):", "if spline_order not in [0, 1, 2, 3, 4, 5,", "return mask # def compute_omt_const(stds,param,dim): # omt_power = param['forward_model']['smoother']['omt_power'] #", "different dimensions :return: Normalized Gaussian evaluated at coordinates in X", "mu[0], 2.)/(2*np.power(sig[0], 2.))) g = g/g.sum() return g elif dim", "tensor[:, :, :, -3] def get_resampled_image(I, spacing, desiredSize, spline_order=1, zero_boundary=False,", "= tm_param updated_param[n,:,dim] = torch.matmul(Cd[n,:,:dim], Ab[n,:,dim]) +Cd[n,:,dim] updated_param = updated_param.transpose(1,2).contiguous().view(Ab.shape[0],-1)", "weights_init_uniform(m): classname = m.__class__.__name__ # print(classname) if classname.find('Conv') != -1:", "= torch.matmul(Cd[n,:,:dim], Ab[n,:,dim]) +Cd[n,:,dim] updated_param = updated_param.transpose(1,2).contiguous().view(Ab.shape[0],-1) return updated_param def", "raise NotImplementedError('initialization method [%s] is not implemented' % init_type) def", "sched == 'nn': warped_label_map = compute_warped_image_multiNC(label_map, phi, spacing,spline_order=0,zero_boundary=True) # check", "dimension dimxXxYxZ # \"\"\" # dim = len(sz) # if", "ValueError(\" the label warping method is not implemented\") return warped_label_map", "v.permute(0,2,3,1) if dim ==3: v = v.permute(0,2,3,4,1) return v def", "[] for key in pd: pl.append(pd[key]) return pl def get_parameter_list_and_par_to_name_dict_from_parameter_dict(pd):", "parameters \"\"\" pl = [] for key in pd: pl.append(pd[key])", "map of dimension dimxXxYxZ \"\"\" dim = len(sz) if dim", "def apply_affine_transform_to_map_multiNC(Ab,phi): \"\"\"Applies an affine transform to maps (for arbitrary", "elif init_type == 
'orthogonal': net.apply(weights_init_orthogonal) else: raise NotImplementedError('initialization method [%s]", "init_type) if init_type == 'rd_normal': net.apply(weights_init_rd_normal) elif init_type == 'normal':", "np.mgrid[0:sz[0], 0:sz[1]] elif dim == 3: id = np.mgrid[0:sz[0], 0:sz[1],", "the moment). ' 'Some functionality may not be available.') def", "= desiredSize[2:] is_numpy = False if not isinstance(I, torch.Tensor): I", "zero_boundary=False, identity_map=None): \"\"\" :param I: B C X Y Z", ":return: returns a tuple: the downsampled image, the new spacing", "= g/g.sum() return g elif dim == 2: g =", "STN_ND_BCXYZ(spacing,zero_boundary, use_bilinear=True, use_01_input=use_01_input) else: stn = SplineInterpolation_ND_BCXYZ(spacing, spline_order) I1_warped =", "number of vector elements :return: returns the parameter vector \"\"\"", "== 6: dim = 2 elif Ab.shape[1] == 12: dim", "elif dim == 3: phiR[0, ...] = Ab[0] * phi[0,", "classname = m.__class__.__name__ # print(classname) if classname.find('Conv') != -1: space_normal(m.weight.data)", "- np.power(X[1,:, :] - mu[1], 2.) 
/ (2 * np.power(sig[1],", "in [1,2,3]: raise ValueError('Only supports dimensions 1, 2, and 3.')", "assert abs(torch.sum(warped_label_map.data -warped_label_map.data.round()))< 0.1, \"nn interpolation is not precise\" else:", "def compute_vector_momentum_from_scalar_momentum_multiN(lam, I, nrOfI, sz, spacing): \"\"\"Computes the vector momentum", "sz[2], sz[3], sz[4]], dtype=dtype) else: raise ValueError('Only dimensions 1-3 are", "Normalized Gaussian evaluated at coordinates in X Example:: >>> mu,", "== 1: id = np.zeros([nrOfI, 1, sz[2]], dtype=dtype) elif dim", "idnp def omt_boundary_weight_mask(img_sz,spacing,mask_range=5,mask_value=5,smoother_std =0.05): \"\"\"generate a smooth weight mask for", "tensor: :param mean: :param std: :return: \"\"\" if isinstance(tensors, Variable):", "\"\"\" desiredSize = desiredSize[2:] is_numpy = False if not isinstance(I,", "3.') return phiR def apply_affine_transform_to_map_multiNC(Ab,phi): \"\"\"Applies an affine transform to", ":return: returns vector field of size nrOfIxdimxXxYxZ \"\"\" nr_of_mg_weights =", "return Iw.view(I0.size()) def compute_warped_image_multiNC(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True): \"\"\"Warps", "str(factor)) return sz else: lowResSize = np.array(sz) if not isinstance(factor,", "init_type == 'kaiming': net.apply(weights_init_kaiming) elif init_type == 'orthogonal': net.apply(weights_init_orthogonal) else:", "tmp def create_ND_scalar_field_parameter_multiNC(sz, nrOfI=1, nrOfC=1): \"\"\" Create vector field torch", "now get it into range [0,(sz-1)*spacing]^d id = np.array(id.astype(dtype)) if", "-= spacing[d]*(sz[d]//2) else: #odd id[d] -= spacing[d]*((sz[d]+1)//2) # and now", "spacing, dtype='float32'): # \"\"\" # Returns a centered identity map", "torch.DoubleTensor, torch.cuda.DoubleTensor # 16 - bit floating point: torch.HalfTensor, torch.cuda.HalfTensor", "dim = get_dim_of_affine_transform(Ab[0,:]) nr_of_images = Ab.size()[0] if nr_of_images != sz[0]:", "Ab.zero_() 
Ab[0]=1. Ab[4]=1. Ab[8]=1. else: raise ValueError('Only supports dimensions 1,", "elif dim == 2: return _compute_warped_image_multiNC_2d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif", "ValueError('Only supports dimensions 1, 2, and 3.') return phiR def", "id[d] *= spacing[d] if sz[d]%2==0: #even id[d] -= spacing[d]*(sz[d]//2) else:", "of an image in BxCxXxYxZ format :param spacing: list with", "x = self.bn(x) if self.active_unit is not None: x =", "sp_sz = [1]+[dim] +[1]*dim # spacing_ratio_t = spacing_ratio_t.view(*sp_sz) # new_var_list", "2: phiR[0, ...] = Ab[0] * phi[0, ...] + Ab[2]", "be here? Needed for new identity map code') raise ValueError('Double", "format) :param spacing: array describing the spatial spacing :param desiredSize:", "#odd # id[d] -= spacing[d]*((sz[d]+1)//2) # # # and now", "torch.HalfTensor, torch.cuda.HalfTensor # todo: maybe find a cleaner way of", "!= -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_normal(m): classname", "net.apply(weights_init_kaiming) elif init_type == 'orthogonal': net.apply(weights_init_orthogonal) else: raise NotImplementedError('initialization method", "np.exp(-np.power(X[0,:,:]-mu[0],2.)/(2*np.power(sig[0],2.)) - np.power(X[1,:, :] - mu[1], 2.) / (2 *", "csz = np.array(sz) # just to make sure it is", "way of doing it return spacing * (np.array(sz[2::]) - 1)", "= Ab.view(Ab.shape[0], dim+1, dim).transpose(1, 2) Cd = Cd.view(Cd.shape[0], dim+1, dim).transpose(1,", "tm_param updated_param[n,:,dim] = torch.matmul(Cd[n,:,:dim], Ab[n,:,dim]) +Cd[n,:,dim] updated_param = updated_param.transpose(1,2).contiguous().view(Ab.shape[0],-1) return", "map for the warping, size BxdimxXxYxZ :param spacing: image spacing", "return inv_affine_param def update_affine_param(Ab, Cd): \"\"\"Update affine parameters. 
Formally: C(Ax+b)+d", "2.)/(2*np.power(sig[0], 2.))) g = g/g.sum() return g elif dim ==", "idnp[0,:, :] = id[0] idnp[1,:, :] = id[1] elif dim==3:", "os.path.splitext(abs_s)[1] abs_t = os.path.abspath(tf) root_t,ext_t = os.path.splitext(abs_t) abs_t_with_right_ext = root_t", "else: raise ValueError('Only supports dimensions 1, 2, and 3.') def", "\"\"\" if (factor is None): print('WARNING: Could not compute low_res_size", "self.t = inputs['t'].detach() self.mask = Parameter(torch.cat([torch.ones(inputs['s'].size())]*dim, 1), requires_grad = True)", "-1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_xavier(m): classname =", "organize_data(input, self.t, sched='depth_concat') return input def forward(self, m,new_s=None): m =", "beta=0 as constants. self.bn = nn.BatchNorm2d(out_channels, eps=0.0001, momentum=0, affine=True) if", "supports dimensions 1, 2, and 3.') def set_affine_transform_to_identity(Ab): \"\"\"Sets the", "phi, spacing, spline_order, zero_boundary=False, use_01_input=True): \"\"\"Warps image. 
:param I0: image", ":] = fdt.dXc(I)*lam m[:, 1, :, :] = fdt.dYc(I)*lam elif", "None): print('WARNING: Could not compute low_res_size as factor was '", "2, 20, 5, active_unit=active_unit, same_padding=True, bn=using_bn), ConvBnRel(20, self.dim, 5, active_unit=active_unit,", "batch size and the number of channels is the same", "BxCxXxYxZ :param sz: size of image :param spacing: spacing of", "nrOfI) # attention that the second dimension here is image", "ConvBnRel(nn.Module): # conv + bn (optional) + relu def __init__(self,", "'model_params' for par in ind_pars: model_pars[par['name']] = par['model_params'] return model_pars", ":] = fdt.dXc(I)*lam elif dim == 2: m[:, 0, :,", "A of appropriate dimension \"\"\" current_dim = len(A.shape) if current_dim", "def centered_identity_map_multiN(sz, spacing, dtype='float32'): \"\"\" Create a centered identity map", "factor): \"\"\" Returns the corresponding low-res size from a (high-res)", "== 'width_concat': input = torch.cat((moving, target), dim=3) elif sched ==", "in the right format model_pars = ind_pars else: # if", "[torch.sqrt(std_w) for std_w in gaussian_std_weights] for g in range(nr_of_mg_weights): weights[:,", "phi: map; format nrCxXxYxZ (nrC corresponds to dimension) :return: returns", "create_ND_scalar_field_parameter_multiNC(sz, nrOfI=1, nrOfC=1): \"\"\" Create vector field torch Parameter of", "self.net_sched == 'm_d_s': if debugging: self.net = nn.Conv2d(self.dim+1, self.dim, kernel_size,", "get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask = sm.smooth(mask) if pow ==2: mask = mask**2", "elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def", "== 'm_d_s_f_t': if debugging: self.net = nn.Conv2d(self.dim + 2, self.dim,", "that the batch size and the number of channels is", "...] 
= gaussian_std_weights[g] tmp = AdaptVal(weights) if get_preweight_from_network: tmp.requires_grad =", "= torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True) start.record() output = f(input) end.record()", "par['model_params'] return model_pars def compute_vector_momentum_from_scalar_momentum_multiNC(lam, I, sz, spacing): \"\"\"Computes the", "= create_ND_vector_field_variable_multiN(sz[2::], nrOfI) # attention that the second dimension here", "sig = [1,1], [1,1] >>> X = [0,0] >>> print(compute_normalized_gaussian(X,", "of size nrOfIxdimxXxYxZ \"\"\" nr_of_mg_weights = len(gaussian_std_weights) csz = np.array(sz)", "elif dim==2: idnp = np.zeros([2, sz[0], sz[1]], dtype=dtype) idnp[0,:, :]", "spacing: array describing the spatial spacing :param desiredSize: array for", "torch.empty(*csz) # set the default if sched =='w_K_w': gaussian_std_weights =", "= np.min(spacing) # spacing_ratio =spacing/min_spacing # dim = spacing.size #", "== 'uniform': net.apply(weights_init_uniform) elif init_type == 'xavier': net.apply(weights_init_xavier) elif init_type", "torch array :return: numpy array \"\"\" return (v.detach()).cpu().numpy() def cxyz_to_xyzc(", "an input to an optimizer. :param pd: parameter dictionary :return:", "vector parameters with a specified number of elements. 
:param nr_of_elements:", "identity map') return idnp def omt_boundary_weight_mask(img_sz,spacing,mask_range=5,mask_value=5,smoother_std =0.05): \"\"\"generate a smooth", "self.fc(x) if self.active_unit is not None: x = self.active_unit(x) return", "elif classname.find('Linear') != -1: init.orthogonal(m.weight.data, gain=1) elif classname.find('BatchNorm2d') != -1:", "len(sz) if dim==1: id = np.mgrid[0:sz[0]] elif dim==2: id =", "a dummy first index for d in range(dim): id[d]*=spacing[d] #id[d]*=2./(sz[d]-1)", "m[:, 0, :, :] = fdt.dXc(I)*lam m[:, 1, :, :]", "= (np.ceil((np.array(sz[2::]) * factor))).astype('int16') return low_res_sz def _compute_low_res_image(I, spacing, low_res_size,", "dim==3: tensor[:, :,:, -1,:, :] = tensor[:, :, -2, :]", "list(desiredSize)) newspacing = spacing * ((sz[2::].astype('float') - 1.) / (", "1 print( '\\n\\nWARNING: forcing last dimension to be even: fix", "param. vector) :param phi: maps; format batchxnrCxXxYxZ (nrC corresponds to", "# this is to make sure that subsequent sums work", "get_parameter_list_from_parameter_dict; but also returns a dictionary which keeps track of", "sz = [1]+ [len(stds)] +[1]*(dim+1) # return omt_const.view(*sz) def get_single_gaussian_smoother(gaussian_std,sz,spacing):", "spline_order: :param zero_boundary: :param identity_map: :return: \"\"\" if spacing is", "dim self.net_sched = 'm_only' self.s = inputs['s'].detach() self.t = inputs['t'].detach()", "if the sz is odd # Otherwise shifts everything by", "import STN_ND_BCXYZ from .data_wrapper import AdaptVal from .data_wrapper import MyTensor", "array on the cpu :param v: torch array :return: numpy", "vectors (batch size x param. 
vector) :param phi: maps; format", "sched='depth_concat') input = organize_data(input, self.t, sched='depth_concat') elif self.net_sched == 'm_f_s_t':", "spacing here before running this code') spacing = np.ones(dim) centered_id", "9 are supported') if spline_order == 0: stn = STN_ND_BCXYZ(spacing,", "sz else: lowResSize = np.array(sz) if not isinstance(factor, list): lowResSize[2::]", "= None def forward(self, x): x = self.fc(x) if self.active_unit", "= np.mgrid[0:sz[0],0:sz[1],0:sz[2]] else: raise ValueError('Only dimensions 1-3 are currently supported", "image to warp, image size BxCxXxYxZ :param phi: map for", "that can be used as an input to an optimizer.", "field torch Variable of given size :param sz: just the", "X = [0,0] >>> print(compute_normalized_gaussian(X, mu, sig) \"\"\" dim =", "str( type(v.data))) def lift_to_dimension(A, dim): \"\"\"Creates a view of A", "centered identity map (with 0 in the middle) if the", "fdt.dZc(I)*lam else: raise ValueError('Can only convert scalar to vector momentum", "dim == 3: id = np.zeros([nrOfI, 3, sz[2], sz[3], sz[4]],", "1: idnp = np.zeros([1, sz[0]], dtype=dtype) idnp[0, :] = id[0]", "1) ########################################## Adaptive Net ###################################################3 def space_normal(tensors, std=0.1): \"\"\" space", "if here should be add assert assert abs(torch.sum(warped_label_map.data -warped_label_map.data.round()))< 0.1,", "to 9 are supported') if spline_order == 0: # return", ":, -1,:] = tensor[:, :,-2,:] + tensor[:, :,-2,:] - tensor[:,", "channels :return: returns vector field of size nrOfIxnrOfCxXxYxZ \"\"\" csz", "import MyTensor from . import smoother_factory as sf from .data_wrapper", "in range(nr_of_images): phiR[nrI, ...] = apply_affine_transform_to_map(Ab[nrI, :], phi[nrI, ...]) return", ":, :, :] = fdt.dXc(I)*lam m[:, 1, :, :, :]", "= id[0] * spacing_ratio[0] # idnp[1, :, :] = id[1]", "1.) 
/ ( desiredSizeNC[2::].astype('float') - 1.)) ########################################### if identity_map is", "desiredSizeNC = np.array([nrOfI, nrOfC] + list(desiredSize)) newspacing = spacing *", "low-res size from a (high-res) sz. :param sz: size (high-res)", "if (factor is None) or (factor >= 1): print('WARNING: Could", "not implemented' % init_type) def organize_data(moving, target, sched='depth_concat'): if sched", "== 'm_d_s': input = organize_data(m, new_s, sched='depth_concat') elif self.net_sched ==", "spacing of low res parameterization \"\"\" # todo: check that", "can be used as an input to an optimizer. :param", "mask = mask*mask*mask return mask # def compute_omt_const(stds,param,dim): # omt_power", "the identity map \"\"\" dim = len(sz) - 2 nrOfI", "def noramlized_spacing_to_smallest(spacing): min_sp = np.min(spacing) spacing[spacing>min_sp]=min_sp return spacing def time_warped_function(f):", "id.reshape(1,sz[0]) # add a dummy first index for d in", "will remove, currently fix for symmetric training if I.shape[0] !=", "is None): print('WARNING: Could not compute low_res_size as factor was", "self.s, sched='depth_concat') input = organize_data(input, self.t, sched='depth_concat') elif self.net_sched ==", ":, :] = id[1] idnp[2, :, :, :] = id[2]", "spacing_ratio = spacing/min_spacing # # # # now get it", ":param spline_order: :param zero_boundary: :param identity_map: :return: \"\"\" if spacing", "== 'm_d_s_f_t': input = organize_data(m, new_s, sched='depth_concat') input = organize_data(input,", "x pars (batch size x param. vector) :return: Inverse of", "and the number of channels is the same nrOfI =", "corresponding low-res size from a (high-res) sz :param sz: size", "dimensions, i.e., XxYxZ # :param spacing: list with spacing information", "be of BxCxXxYxZ format) :param spacing: array describing the spatial", "of affine transformation. 
Formally: C(Ax+b)+d = CAx+Cb+d = x; C", "can only be warped in dimensions 1 to 3') def", "\"\"\" fdt = fd.FD_torch(spacing) dim = len(sz) m = create_ND_vector_field_variable_multiN(sz,", "tensor[:, :, -3, :] tensor[:, :,:, :, -1, :] =", "and C, i.e, 1 entry for 1D, 2 for 2D,", "= [0,0] >>> print(compute_normalized_gaussian(X, mu, sig) \"\"\" dim = len(mu)", "# sz = [1]+ [len(stds)] +[1]*(dim+1) # return omt_const.view(*sz) def", "parameters and converts it into a list of parameters that", "np.mgrid[0:sz[0]] # elif dim == 2: # id = np.mgrid[0:sz[0],", "number of affine transforms') phiR = MyTensor(sz).zero_().type_as(phi) for nrI in", "factor))).astype('int16') return low_res_sz def _compute_low_res_image(I, spacing, low_res_size, spline_order): import mermaid.image_sampling", "it into range [0,(sz-1)*spacing]^d id = np.array(id.astype(dtype)) if dim ==", "nothing to do here, these are already the same file", "None: # todo will remove, currently fix for symmetric training", "C X Y Z :param spline_order: :param zero_boundary: :param identity_map:", "dimxXxYxZ \"\"\" dim = len(sz) if dim==1: id = np.mgrid[0:sz[0]]", "and 3 for 3D) :return: returns a tuple: the downsampled", ":return: returns the parameter vector \"\"\" return Parameter(MyTensor(nr_of_elements).normal_(0., 1e-7)) def", "range [0,(sz-1)*spacing]^d id = np.array( id.astype(dtype) ) if dim==1: id", "dimensions 1, 2, and 3.') phiR = MyTensor(sz).zero_().type_as(phi) if dim", "def create_ND_vector_field_variable(sz): \"\"\"Create vector field torch Variable of given size.", "stn = STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=True, use_01_input=use_01_input) else: stn = SplineInterpolation_ND_BCXYZ(spacing,", "and each list element has a dictionary with keys 'name'", "bn (optional) + relu def __init__(self, in_channels, out_channels, kernel_size, stride=1,", "nrOfC = sz[1] for c in range(nrOfC): # loop over", "keeps track of the keys based on memory id. 
:param", "2).contiguous().view(Ab.shape[0], -1) return inv_affine_param def update_affine_param(Ab, Cd): \"\"\"Update affine parameters.", "gain=1) elif classname.find('Linear') != -1: init.xavier_normal(m.weight.data, gain=1) elif classname.find('BatchNorm2d') !=", ":param factor: low-res factor (needs to be <1) :return: low", "maps (for arbitrary batch size). :param Ab: affine transform parameter", "n in range(tensors.size()[0]): for c in range(tensors.size()[1]): dim = tensors[n][c].dim()", "new_var_list # def recover_var_list_from_min_normalized_space(var_list,spacing,do_transform=True): # if do_transform: # min_spacing =", "kernel_size=5): # return the self.net and self.net_input padding_size = (kernel_size-1)//2", "\"\"\" dim = len(v.shape)-2 if dim ==2: v = v.permute(0,2,3,1)", "x pars (batch size x param. vector); will be overwritten", "first index for d in range(dim): id[d] *= spacing[d] if", "from .data_wrapper import MyTensor from . import smoother_factory as sf", "init_type) def organize_data(moving, target, sched='depth_concat'): if sched == 'depth_concat': input", "# spacing_ratio =min_spacing/spacing # dim = spacing.size # spacing_ratio_t =", "Ab.shape[1] == 2: dim = 1 elif Ab.shape[1] == 6:", "std_w in gaussian_std_weights] for g in range(nr_of_mg_weights): weights[:, g, ...]", "mus = np.zeros(dim) stds = std * np.ones(dim) print('WARNING: What", "parameters. 
Formally: C(Ax+b)+d = CAx+Cb+d :param Ab: B x pars", "min=(np.asscalar(np.finfo('float16').min))/reduction_factor, max=(np.asscalar(np.finfo('float16').max))/reduction_factor) else: raise ValueError('Unknown data type: ' + str(", "= get_dim_of_affine_transform(Ab[0,:]) nr_of_images = Ab.size()[0] if nr_of_images != sz[0]: raise", "3') def _get_low_res_spacing_from_spacing(spacing, sz, lowResSize): \"\"\"Computes spacing for the low-res", "return 1 elif nr==6: return 2 elif nr==12: return 3", "spline_order=spline_order, zero_boundary=zero_boundary, identity_map=identity_map) return resampled def resample_image(I, spacing, desiredSize, spline_order=1,", "map; format nrCxXxYxZ (nrC corresponds to dimension) :return: returns transformed", "SplineInterpolation_ND_BCXYZ(spacing, spline_order) I1_warped = stn(I0, phi) return I1_warped def _compute_warped_image_multiNC_2d(I0,", "nrOfI: number of images :param nrOfC: number of channels :return:", "!= -1: init.kaiming_normal(m.weight.data, a=0, mode='fan_in') elif classname.find('Linear') != -1: init.kaiming_normal(m.weight.data,", "Parameter(tmp) return tmp def create_ND_scalar_field_parameter_multiNC(sz, nrOfI=1, nrOfC=1): \"\"\" Create vector", "generate displacement field \"\"\" def __init__(self, inputs, dim, net_sched=None): #", "= MyTensor(sz).zero_().type_as(phi) if dim == 1: phiR = phi *", "here is image dim, not nrOfC nrOfC = sz[1] for", "= STN_ND_BCXYZ(spacing,zero_boundary, use_bilinear=True, use_01_input=use_01_input) else: stn = SplineInterpolation_ND_BCXYZ(spacing, spline_order) I1_warped", "channels is the same nrOfI = sz[0] nrOfC = sz[1]", "could not be imported (only supported in CUDA at the", "a dictionary could contain ['s'],['t'] super(AdpSmoother, self).__init__() self.dim = dim", "def set_affine_transform_to_identity(Ab): \"\"\"Sets the affine transformation as given by the", "phi[0, ...] + Ab[4] * phi[1, ...] 
+ Ab[7] *", "id[1] elif dim == 3: idnp = np.zeros([3, sz[0], sz[1],", "not None: x = self.bn(x) if self.active_unit is not None:", "(batch size x param. vector) :return: Updated affine parameters \"\"\"", "...] = Ab[1] * phi[0, ...] + Ab[3] * phi[1,", ":] - mu[0], 2.) / (2 * np.power(sig[0], 2.)) -np.power(X[1,:,", "image to warp, image size XxYxZ :param phi: map for", "bias=False,groups=2) else: net = \\ [ConvBnRel(self.dim, 20, 5, active_unit=active_unit, same_padding=True,", "in x] def noramlized_spacing_to_smallest(spacing): min_sp = np.min(spacing) spacing[spacing>min_sp]=min_sp return spacing", "I1_warped = stn(I0, phi) return I1_warped def _compute_warped_image_multiNC_2d(I0, phi, spacing,", "input = organize_data(m, self.s, sched='depth_concat') input = organize_data(input, self.t, sched='depth_concat')", "3D) :param nrOfI: number of images :return: returns vector field", "* np.ones(dim) print('WARNING: What should the spacing be here? Needed", "image (expected to be of BxCxXxYxZ format) :param spacing: array", "phi) return I1_warped def compute_warped_image(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True):", "to warp, image size XxYxZ :param phi: map for the", "else: # if ind_pars is not a dictionary assume that", "dim ==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1 elif dim==3: mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range ]=1 sm = get_single_gaussian_smoother(smoother_std,img_sz,spacing)", "input def forward(self, m,new_s=None): m = m * self.mask input", "optimizer. 
:param pd: parameter dictionary :return: list of parameters \"\"\"", ":param sz: size of image :param lowResSize: size of low", "torch.zeros_like(Ab) for n in range(Ab.shape[0]): tm_inv = torch.inverse(Ab[n, :, :dim])", "returns the identity map \"\"\" dim = len(sz)-2 nrOfI =", "of images m = create_ND_vector_field_variable_multiN(sz[2::], nrOfI) # attention that the", "def create_symlink_with_correct_ext(sf, tf): abs_s = os.path.abspath(sf) ext_s = os.path.splitext(abs_s)[1] abs_t", "the spacing be here? Needed for new identity map code')", "tm_inv Ab_inv[n, :, dim] = - torch.matmul(tm_inv, Ab[n,:,dim]) inv_affine_param =", "def _compute_warped_image_multiNC_3d(I0, phi, spacing, spline_order,zero_boundary=False,use_01_input=True): if spline_order not in [0,", "identity map \"\"\" dim = len(sz) - 2 nrOfI =", "returns it as a numpy array on the cpu :param", "the net kernel :param tensor: :param mean: :param std: :return:", "== 1: # id = np.mgrid[0:sz[0]] # elif dim ==", "the scalar momentum: :math:`m=\\\\lambda\\\\nabla I`. 
:param lam: scalar momentum, BxCxXxYxZ", "= len(sz) csz = np.array(sz) # just to make sure", "get_single_gaussian_smoother(smoother_std,img_sz,spacing) mask = sm.smooth(mask) return mask.detach() def momentum_boundary_weight_mask(img_sz,spacing,mask_range=5,smoother_std =0.05,pow=2): \"\"\"generate", "= fdt.dXc(I)*lam m[:, 1, :, :, :] = fdt.dYc(I)*lam m[:,", "=0 if Ab.shape[1] == 2: dim = 1 elif Ab.shape[1]", "# from builtins import range import torch from torch.nn.parameter import", "' 'Some functionality may not be available.') def my_hasnan(x): \"\"\"Check", "inv_affine_param = Ab_inv.transpose(1, 2).contiguous().view(Ab.shape[0], -1) return inv_affine_param def update_affine_param(Ab, Cd):", "idnp = np.zeros([3,sz[0], sz[1], sz[2]], dtype=dtype) idnp[0,:, :, :] =", "Ab = Ab.view(Ab.shape[0], dim+1, dim).transpose(1,2) Ab_inv = torch.zeros_like(Ab) for n", "torch.min(stds) # max_std = torch.max(stds) # omt_const = torch.abs(torch.log(max_std/stds))**omt_power #", "32 - bit floating point: torch.FloatTensor, torch.cuda.FloatTensor # 64 -", "elif nr==6: return 2 elif nr==12: return 3 else: raise", "== 2: g = np.exp(-np.power(X[0,:,:]-mu[0],2.)/(2*np.power(sig[0],2.)) - np.power(X[1,:, :] - mu[1],", "#When affine=False the output of BatchNorm is equivalent to considering", "not be available.') def my_hasnan(x): \"\"\"Check if any input elements", "and 3.') def set_affine_transform_to_identity_multiN(Ab): \"\"\"Set the affine transforms to the", "3, 4, 5, 6, 7, 8, 9]: raise ValueError('Currently only", ":, :,-2] - tensor[:, :, :,-3] if dim==3: tensor[:, :,:,", "if lowResSize[-1] % 2 != 0: lowResSize[-1] -= 1 print(", "self.active_unit = nn.ELU(inplace=True) else: self.active_unit = None def forward(self, x):", "'float64', ...) 
# :return: returns the identity map of dimension", "spacing, low_res_size[2::],spline_order) return low_res_image def individual_parameters_to_model_parameters(ind_pars): model_pars = dict() if", "elif self.net_sched == 'm_d_s': input = organize_data(m, new_s, sched='depth_concat') elif", "-1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_kaiming(m): classname =", "active_unit='relu'): super(FcRel, self).__init__() self.fc = nn.Linear(in_features, out_features) if active_unit ==", "list and each list element has a dictionary with keys", "dim = 3 if dim not in [1, 2, 3]:", "dim==1: tensor[:,:,-1]= tensor[:,:-2]+ tensor[:,:-2]-tensor[:,:-3] if dim==2: tensor[:, :, -1,:] =", "2D, [5,10,10] in 3D) :param nrOfI: number of images :return:", "self.net_sched == 'm_d_s_f_t': input = organize_data(m, new_s, sched='depth_concat') input =", "image :param lowResSize: size of low re parameterization :return: returns", "1 :param d2: dictionary 2 :return: resulting dictionary \"\"\" d", "= STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=False, use_01_input=use_01_input) elif spline_order == 1: stn", "same_padding=True, bn=using_bn)] if using_sigmoid: net += [nn.Sigmoid()] self.net = nn.Sequential(*net)", "# id[d] -= spacing[d]*(sz[d]//2) # else: # #odd # id[d]", "supported for the identity map') return idnp def omt_boundary_weight_mask(img_sz,spacing,mask_range=5,mask_value=5,smoother_std =0.05):", "try: from .libraries.functions.nn_interpolation import get_nn_interpolation except ImportError: print('WARNING: nn_interpolation could", "sz[1], sz[2]], dtype=dtype) # idnp[0, :, :, :] = id[0]", "elif classname.find('Linear') != -1: init.kaiming_normal(m.weight.data, a=0, mode='fan_in') elif classname.find('BatchNorm2d') !=", "par_to_name_dict def remove_infs_from_variable(v): # 32 - bit floating point: torch.FloatTensor,", "phi * Ab[0] + Ab[1] elif dim == 2: phiR[0,", "dim == 1: return _compute_warped_image_multiNC_1d(I0, phi, spacing, 
spline_order,zero_boundary,use_01_input=use_01_input) elif dim", "so it is centered around 0) :param sz: size of", "id[d] *= spacing[d] # if sz[d]%2==0: # #even # id[d]", "spline_order) I1_warped = stn(I0, phi) return I1_warped def _compute_warped_image_multiNC_2d(I0, phi,", "results m = m + compute_vector_momentum_from_scalar_momentum_multiN(lam[:, c, ...], I[:, c,", "of size nrOfIxdimxXxYxZ \"\"\" dim = len(sz) csz = np.array(sz)", "nr_of_images=1): \"\"\" Create vector field torch Variable of given size", "sz[0]], dtype=dtype) idnp[0, :] = id[0] elif dim == 2:", "tmp.requires_grad = True else: tmp = Parameter(tmp) return tmp def", "for 1D, 2 for 2D, and 3 for 3D) :return:", "Ab: parameter vector :return: dimensionality of transform (1,2,or 3) \"\"\"", "finish running torch.cuda.synchronize() print(start.elapsed_time(end)) return output return __time_warped_function def interoplate_boundary_right(tensor):", "size BxdimxXxYxZ :param spacing: image spacing [dx,dy,dz] :return: returns the", "* np.power(sig[0], 2.)) -np.power(X[1,:, :, :] - mu[1], 2.) /", "it into range [0,(sz-1)*spacing]^d id = np.array( id.astype(dtype) ) if", "gi[1], gi[2] class ConvBnRel(nn.Module): # conv + bn (optional) +", "id[0] idnp[1,:, :] = id[1] elif dim==3: idnp = np.zeros([3,sz[0],", "np.power(sig[0], 2.)) -np.power(X[1,:, :, :] - mu[1], 2.) / (2", "if identity_map is not None: idDes = identity_map else: idDes", "not remove them') if current_dim == dim: return A else:", "transform. 
:param Ab: Affine parameter vector (will be overwritten with", "= np.zeros([2, sz[0], sz[1]], dtype=dtype) idnp[0, :, :] = id[0]", "size should not occur in practice anyway sz = v.size()", "smooth weight mask for the omt \"\"\" dim = len(img_sz)", "momentum_boundary_weight_mask(img_sz,spacing,mask_range=5,smoother_std =0.05,pow=2): \"\"\"generate a smooth weight mask for the omt", "classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_xavier(m):", "= n_batch identity_map = identity_map[:n_batch] resampled, new_spacing = resample_image(I, spacing,", "Ab[4]=1. Ab[8]=1. else: raise ValueError('Only supports dimensions 1, 2, and", ":, :] = id[1] * spacing_ratio[1] # elif dim ==", "# id[d] -= spacing[d]*((sz[d]+1)//2) # # # and now store", "model_pars = ind_pars else: # if ind_pars is not a", "# 16 - bit floating point: torch.HalfTensor, torch.cuda.HalfTensor # todo:", "AdaptVal(torch.ones(*mask_sz))*mask_value if dim ==2: mask[:,:,mask_range:-mask_range,mask_range:-mask_range]=1 elif dim==3: mask[:,:,mask_range:-mask_range,mask_range:-mask_range,mask_range:-mask_range ]=1 sm", "returns vector field of size dimxXxYxZ \"\"\" dim = len(sz)", "def t2np(v): \"\"\" Takes a torch array and returns it", "torch.cuda.HalfTensor # todo: maybe find a cleaner way of handling", "m + compute_vector_momentum_from_scalar_momentum_multiN(lam[:, c, ...], I[:, c, ...], nrOfI, sz[2::],", "-1: space_normal(m.weight.data) elif classname.find('Linear') != -1: space_normal(m.weight.data) elif classname.find('BatchNorm2d') !=", "-1: init.normal(m.weight.data) elif classname.find('Linear') != -1: init.normal(m.weight.data) elif classname.find('BatchNorm2d') !=", "= 3 if dim not in [1, 2, 3]: raise", ":return: Inverse of affine parameters \"\"\" dim =0 if Ab.shape[1]", "organize_data(input, self.t, sched='depth_concat') elif self.net_sched == 'm_d_s_f_t': input = organize_data(m,", 
"nrOfI=1,sched='w_K_w',get_preweight_from_network=False): \"\"\" Create vector field torch Parameter of given size", "\"\"\" # dim = len(sz) # if dim == 1:", ":,-2,:] - tensor[:, :,-3,:] tensor[:, :, :,-1] = tensor[:, :,", "Ab[1] * phi[0, ...] + Ab[3] * phi[1, ...] +", "condition = True if type(v.data) == torch.cuda.FloatTensor or v.data.dtype==torch.float32: return", "here before running this code') spacing = np.ones(dim) centered_id =", "abs_t = os.path.abspath(tf) root_t,ext_t = os.path.splitext(abs_t) abs_t_with_right_ext = root_t +", "* phi[1, ...] + Ab[8] * phi[2, ...] + Ab[11]", "if dim ==3: v = v.permute(0,2,3,4,1) return v def get_scalar(v):", "momentum \"\"\" nrOfI = sz[0] # number of images m", "and rescale by the ratio # if dim == 1:", "init.uniform(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02)", "sm.smooth(mask) if pow ==2: mask = mask**2 if pow ==3:", "1, 2, and 3.') return phiR def apply_affine_transform_to_map_multiNC(Ab,phi): \"\"\"Applies an", "Ab[7] * phi[2, ...] + Ab[10] phiR[2, ...] = Ab[2]", "* phi[0, ...] + Ab[5] * phi[1, ...] + Ab[8]", "* np.power(sig[1], 2.))) g = g/g.sum() return g elif dim", "_compute_warped_image_multiNC_1d(I0, phi, spacing, spline_order, zero_boundary=False, use_01_input=True): if spline_order not in", "+ Ab[8] * phi[2, ...] + Ab[11] else: raise ValueError('Only", "elif isinstance(v, np.ndarray) and v.size == 1: return float(v) def", "0.02) init.constant(m.bias.data, 0.0) def weights_init_xavier(m): classname = m.__class__.__name__ # print(classname)", "std=std) return tensors for n in range(tensors.size()[0]): for c in", "a_11x+a_21y+b1 phiR[1, ...] = Ab[1] * phi[0, ...] + Ab[3]", "if Ab.shape[1]==2: dim = 1 elif Ab.shape[1]==6: dim = 2", "def get_resampled_image(I, spacing, desiredSize, spline_order=1, zero_boundary=False, identity_map=None): \"\"\" :param I:", "+ Ab[7] * phi[2, ...] + Ab[10] phiR[2, ...] 
=", "key in pd: pl.append(pd[key]) par_to_name_dict[pd[key]] = key return pl, par_to_name_dict", "an identity map :param sz: size of an image in", "mask = mask**2 if pow ==3: mask = mask*mask*mask return", "+[1]*(dim+1) # return omt_const.view(*sz) def get_single_gaussian_smoother(gaussian_std,sz,spacing): s_m_params = pars.ParameterDict() s_m_params['smoother']['type']", "# id = np.mgrid[0:sz[0], 0:sz[1], 0:sz[2]] # else: # raise", "identity_map(sz,spacing,dtype='float32'): \"\"\" Returns an identity map. :param sz: just the", "return omt_const.view(*sz) def get_single_gaussian_smoother(gaussian_std,sz,spacing): s_m_params = pars.ParameterDict() s_m_params['smoother']['type'] = 'gaussian'", "arbitrary batch size). :param Ab: Parameter vectors B x pars", "* spacing_ratio[0] # elif dim == 2: # idnp =", "the identity transform. :param Ab: Affine parameter vector (will be", "!= -1: init.normal(m.weight.data) elif classname.find('BatchNorm2d') != -1: init.uniform(m.weight.data, 1.0, 0.02)", "sched='depth_concat') elif self.net_sched == 'm_d_s_f_t': input = organize_data(m, new_s, sched='depth_concat')", "point: torch.DoubleTensor, torch.cuda.DoubleTensor # 16 - bit floating point: torch.HalfTensor,", "2, :, :, :] = fdt.dZc(I)*lam else: raise ValueError('Can only", "3 else: raise ValueError('Only supports dimensions 1, 2, and 3.')", "return _compute_warped_image_multiNC_3d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) else: raise ValueError('Images can only", "id[n, ...] = centered_identity_map(sz[2::], spacing,dtype=dtype) return id def identity_map_multiN(sz,spacing,dtype='float32'): \"\"\"", "map') return idnp # # def centered_min_normalized_identity_map(sz, spacing, dtype='float32'): #", "with keys 'name' and 'model_params' for par in ind_pars: model_pars[par['name']]", ":, :] = id[0] * spacing_ratio[0] # idnp[1, :, :]", "d in range(dim): id[d]*=spacing[d] #id[d]*=2./(sz[d]-1) #id[d]-=1. 
# and now store", "idnp # # def tranfrom_var_list_into_min_normalized_space(var_list,spacing,do_transform=True): # if do_transform: # min_spacing", "True if NaNs are present, False else \"\"\" return (x", "returns vector field of size nrOfIxnrOfCxXxYxZ \"\"\" csz = np.array(sz)", "padding=padding,bias=bias) #y = \\frac{x - mean[x]}{ \\sqrt{Var[x] + \\epsilon}} *", "\"\"\" space normalize for the net kernel :param tensor: :param", "# id = id.reshape(1, sz[0]) # add a dummy first", "spacing :param spacing: image spacing :param sz: size of image", "tensor[:, :, :,-2] + tensor[:, :, :,-2] - tensor[:, :,", "phi, spacing,spline_order=0,zero_boundary=True) # check if here should be add assert", "classname.find('Linear') != -1: init.uniform(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm2d') != -1:", "I1_warped def _compute_warped_image_multiNC_2d(I0, phi, spacing, spline_order,zero_boundary=False,use_01_input=True): if spline_order not in", "current_dim = len(A.shape) if current_dim > dim: raise ValueError('Can only", "par in ind_pars: model_pars[par['name']] = par['model_params'] return model_pars def compute_vector_momentum_from_scalar_momentum_multiNC(lam,", "nrOfI = sz[0] # number of images m = create_ND_vector_field_variable_multiN(sz[2::],", "param. 
vector) :return: Inverse of affine parameters \"\"\" dim =0", "return gi[0], gi[1], gi[2] class ConvBnRel(nn.Module): # conv + bn", "at which to evaluate :param mu: array indicating the mean", "warp, image size XxYxZ :param phi: map for the warping,", "# if dim == 1: # idnp = np.zeros([1, sz[0]],", "[var*spacing_ratio_t if var is not None else None for var", "gain=1) elif classname.find('Linear') != -1: init.orthogonal(m.weight.data, gain=1) elif classname.find('BatchNorm2d') !=", "STN_ND_BCXYZ(spacing, zero_boundary, use_bilinear=True, use_01_input=use_01_input) else: stn = SplineInterpolation_ND_BCXYZ(spacing, spline_order) I1_warped", "def create_ND_vector_field_variable_multiN(sz, nr_of_images=1): \"\"\" Create vector field torch Variable of", "= np.zeros([3, sz[0], sz[1], sz[2]], dtype=dtype) # idnp[0, :, :,", "of affine transforms') if dim != len(sz)-2: raise ValueError('Incompatible number", "input should be list of Variable \"\"\" return [len(np.argwhere(np.isnan(elem.detach().cpu().numpy()))) for", "spatial dimensions, i.e., XxYxZ # :param spacing: list with spacing", "return Parameter(MyTensor(*(csz.tolist())).normal_(0.,1e-7)) def centered_identity_map_multiN(sz, spacing, dtype='float32'): \"\"\" Create a centered", "the results m = m + compute_vector_momentum_from_scalar_momentum_multiN(lam[:, c, ...], I[:,", "todo:: Reorganize this package in a more meaningful way. 
\"\"\"", "elif dim == 3: g = np.exp(-np.power(X[0,:, :, :] -", "m[:, 1, :, :] = fdt.dYc(I)*lam elif dim == 3:", "[5,10,10] in 3D) :param nrOfI: number of images :param nrOfC:", "self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride, padding=padding,bias=bias) #y = \\frac{x", ":param desiredSize: array for the desired size (excluding B and", ":param phi: maps; format batchxnrCxXxYxZ (nrC corresponds to dimension) :return:", "Ab.shape[1]==6: dim = 2 elif Ab.shape[1]==12: dim = 3 if", "= phi * Ab[0] + Ab[1] elif dim == 2:", "warped image of size XxYxZ \"\"\" # implements this by", "it into range [0,(sz-1)*spacing]^d # id = np.array(id.astype(dtype)) # if", "recover_var_list_from_min_normalized_space(var_list,spacing,do_transform=True): # if do_transform: # min_spacing = np.min(spacing) # spacing_ratio", "using bn, using elu] # inputs should be a dictionary", "phiR def compute_normalized_gaussian(X, mu, sig): \"\"\"Computes a normalized Gaussian. :param", "np.zeros([nrOfI,2,sz[2],sz[3]],dtype=dtype) elif dim == 3: id = np.zeros([nrOfI,3,sz[2],sz[3],sz[4]],dtype=dtype) else: raise", "nn_interpolation could not be imported (only supported in CUDA at", ":return: \"\"\" dim = get_dim_of_affine_transform(Ab) if dim==1: Ab.zero_() Ab[0]=1. elif", "if using_sigmoid: net += [nn.Sigmoid()] self.net = nn.Sequential(*net) def prepare_data(self,", "get_parameter_list_and_par_to_name_dict_from_parameter_dict(pd): \"\"\"Same as get_parameter_list_from_parameter_dict; but also returns a dictionary which", "/ (np.array(lowResSize[2::])-1) def _get_low_res_size_from_size(sz, factor): \"\"\"Returns the corresponding low-res size", "parameterization :return: returns spacing of low res parameterization \"\"\" #", "...] = Ab[2] * phi[0, ...] + Ab[5] * phi[1,", "sz[1]], dtype=dtype) idnp[0, :, :] = id[0] idnp[1, :, :]", "import torch.nn.init as init from . 
import module_parameters as pars", "identity_map_multiN(sz,spacing,dtype='float32'): \"\"\" Create an identity map :param sz: size of", "init.constant(m.bias.data, 0.0) def weights_init_kaiming(m): classname = m.__class__.__name__ # print(classname) if", "spacing * (np.array(sz[2::])-1) / (np.array(lowResSize[2::])-1) def _get_low_res_size_from_size(sz, factor): \"\"\"Returns the", "momentum from the scalar momentum: :math:`m=\\\\lambda\\\\nabla I`. :param lam: scalar", "== 3: g = np.exp(-np.power(X[0,:, :, :] - mu[0], 2.)", "spline_order,zero_boundary,use_01_input=use_01_input) elif dim == 2: return _compute_warped_image_multiNC_2d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input)", ":param tensor: :param mean: :param std: :return: \"\"\" if isinstance(tensors,", "= torch.min(stds) # max_std = torch.max(stds) # omt_const = torch.abs(torch.log(max_std/stds))**omt_power", "momentum, BxCxXxYxZ :param I: image, BxCxXxYxZ :param sz: size of", "field \"\"\" def __init__(self, inputs, dim, net_sched=None): # settings should", "_compute_warped_image_multiNC_3d(I0, phi, spacing, spline_order,zero_boundary=False,use_01_input=True): if spline_order not in [0, 1,", "spacing of image :return: returns the vector momentum \"\"\" fdt", "-2, :] + tensor[:, :, -2, :] - tensor[:, :,", "= tensors[n][c].size() mus = np.zeros(dim) stds = std * np.ones(dim)", "1.0, 0.02) init.constant(m.bias.data, 0.0) def weights_init_orthogonal(m): classname = m.__class__.__name__ print(classname)", ":] = id[0] * spacing_ratio[0] # elif dim == 2:", "phiR[0, ...] = Ab[0] * phi[0, ...] + Ab[2] *", "Parameter from torch.autograd import Variable from .libraries.modules.stn_nd import STN_ND_BCXYZ from", "set_affine_transform_to_identity_multiN(Ab): \"\"\"Set the affine transforms to the identity (in the", "low res size \"\"\" if (factor is None): print('WARNING: Could", "self.mask input = self.prepare_data(m,new_s) x= input x = self.net(x) return", "elements are NaNs. 
:param x: numpy array :return: True if", "raise ValueError('Only supports dimensions 1, 2, and 3.') Ab =", "+ Ab[4] # a_11x+a_21y+b1 phiR[1, ...] = Ab[1] * phi[0,", "...] + Ab[9] phiR[1, ...] = Ab[1] * phi[0, ...]", "should already be in the right format model_pars = ind_pars", "dim: desired dimension of view :return: returns view of A", "phi: maps; format batchxnrCxXxYxZ (nrC corresponds to dimension) :return: returns", "# now get it into range [0,(sz-1)*spacing]^d id = np.array(id.astype(dtype))", "[0,(sz-1)*spacing]^d id = np.array(id.astype(dtype)) if dim == 1: id =", ":, -2, :] + tensor[:, :, -2, :] - tensor[:,", "map') for n in range(nrOfI): id[n,...] = identity_map(sz[2::],spacing,dtype=dtype) return id", "phi[0, ...] + Ab[3] * phi[1, ...] + Ab[6] *", "spatial spacing :param desiredSize: array for the desired size (excluding", "== 3: id = np.zeros([nrOfI, 3, sz[2], sz[3], sz[4]], dtype=dtype)", "occur in practice anyway sz = v.size() reduction_factor = np.prod(np.array(sz))", "nrOfIxdimxXxYxZ \"\"\" nr_of_mg_weights = len(gaussian_std_weights) csz = np.array(sz) # just", "parameters \"\"\" dim =0 if Ab.shape[1] == 2: dim =", "_compute_warped_image_multiNC_1d(I0, phi, spacing, spline_order,zero_boundary,use_01_input=use_01_input) elif dim == 2: return _compute_warped_image_multiNC_2d(I0,", "1): print('WARNING: Could not compute low_res_size as factor was '", "default if sched =='w_K_w': gaussian_std_weights = [torch.sqrt(std_w) for std_w in", "for std_w in gaussian_std_weights] for g in range(nr_of_mg_weights): weights[:, g,", "returns vector field of size nrOfIxdimxXxYxZ \"\"\" nr_of_mg_weights = len(gaussian_std_weights)", "not None else None for var in var_list] # else:", "# def tranfrom_var_list_into_min_normalized_space(var_list,spacing,do_transform=True): # if do_transform: # min_spacing = np.min(spacing)", "0.0) def weights_init_orthogonal(m): classname = m.__class__.__name__ print(classname) if classname.find('Conv') !=", "than it could be, # but 
values of this size", "9]: raise ValueError('Currently only orders 0 to 9 are supported')", "idnp[0, :] = id[0] elif dim == 2: idnp =", "spacing_ratio_t = spacing_ratio_t.view(*sp_sz) # new_var_list = [var*spacing_ratio_t if var is", "dim = tensors[n][c].dim() sz = tensors[n][c].size() mus = np.zeros(dim) stds", "!= identity_map.shape[0]: n_batch = I.shape[0] desiredSize = desiredSize.copy() desiredSize[0] =", ":, dim] = - torch.matmul(tm_inv, Ab[n,:,dim]) inv_affine_param = Ab_inv.transpose(1, 2).contiguous().view(Ab.shape[0],", "sf from .data_wrapper import USE_CUDA import numpy as np from", "dimension of view :return: returns view of A of appropriate", "corresponding to an affine transformation of the form y=Ax+b stored", "raise ValueError('Images can only be warped in dimensions 1 to", "just the spatial dimensions, i.e., XxYxZ :param spacing: list with", ":return: low res size \"\"\" if (factor is None): print('WARNING:", "settings should include [using_bias, using bn, using elu] # inputs", "using_sigmoid: net += [nn.Sigmoid()] self.net = nn.Sequential(*net) elif self.net_sched =='m_f_s':", "(nrC corresponds to dimension) :return: returns transformed map \"\"\" sz", "return v elif isinstance(v, np.ndarray) and v.size == 1: return", "padding=padding_size, bias=False) else: net = \\ [ConvBnRel(self.dim +1, 20, 5,", "debugging: self.net = nn.Conv2d(self.dim+2, self.dim, kernel_size, 1, padding=padding_size, bias=False) else:", "todo: maybe find a cleaner way of handling this #", "= par['model_params'] return model_pars def compute_vector_momentum_from_scalar_momentum_multiNC(lam, I, sz, spacing): \"\"\"Computes", "way of doing it return spacing * (np.array(sz[2::])-1) / (np.array(lowResSize[2::])-1)", ":, :] = id[0] * spacing_ratio[0] # idnp[1, :, :,", "def combine_dict(d1,d2): \"\"\"Creates a dictionary which has entries from both", "elif init_type == 'kaiming': net.apply(weights_init_kaiming) elif init_type == 'orthogonal': net.apply(weights_init_orthogonal)", 
"elif dim == 2: m[:, 0, :, :] = fdt.dXc(I)*lam", "forward(self, x): x = self.fc(x) if self.active_unit is not None:", "everything to finish running torch.cuda.synchronize() print(start.elapsed_time(end)) return output return __time_warped_function", "warped_label_map def t2np(v): \"\"\" Takes a torch array and returns", "in range(tensors.size()[1]): dim = tensors[n][c].dim() sz = tensors[n][c].size() mus =", "a dictionary which has entries from both of them. :param", "id[d] -= spacing[d]*((sz[d]+1)//2) # # # and now store it", "\"\"\" dim = len(sz) if dim == 1: id =", "get_single_gaussian_smoother(gaussian_std,sz,spacing): s_m_params = pars.ParameterDict() s_m_params['smoother']['type'] = 'gaussian' s_m_params['smoother']['gaussian_std'] = gaussian_std", "field torch Parameter of given size. :param sz: just the", "return sz else: lowResSize = np.array(sz) if not isinstance(factor, list):", "in X Example:: >>> mu, sig = [1,1], [1,1] >>>", "4, 5, 6, 7, 8, 9]: raise ValueError('Currently only orders", "if dim == 1: # id = np.mgrid[0:sz[0]] # elif", "np.array(list(I.size())) # check that the batch size and the number", "dictionary assume that they come from the optimizer # (i.e.,", "from __future__ import absolute_import # from builtins import str #", ":param Ab: Affine parameter vector (will be overwritten with the", "dim == 3: idnp = np.zeros([3, sz[0], sz[1], sz[2]], dtype=dtype)", "# add a dummy first index # # for d", "dim: return A else: return A.reshape([1]*(dim-current_dim)+list(A.shape)) def get_dim_of_affine_transform(Ab): \"\"\"Returns the", "return A.reshape([1]*(dim-current_dim)+list(A.shape)) def get_dim_of_affine_transform(Ab): \"\"\"Returns the number of dimensions corresponding", "torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True) start.record() output = f(input) end.record() #", "=spacing/min_spacing # dim = spacing.size # spacing_ratio_t = AdaptVal(torch.Tensor(spacing_ratio)) #", "low-res parameterization from image 
spacing :param spacing: image spacing :param", "entries from both of them. :param d1: dictionary 1 :param", ":, :] = fdt.dXc(I)*lam m[:, 1, :, :, :] =", "is centered around 0) :param sz: size of an image", "is odd Otherwise shifts everything by 0.5*spacing :param sz: just", "phi[1, ...] + Ab[7] * phi[2, ...] + Ab[10] phiR[2,", "else: raise ValueError(\" the label warping method is not implemented\")", "desiredSize = desiredSize[2:] is_numpy = False if not isinstance(I, torch.Tensor):" ]
[ "2D image (x: time, y: channels, color: amplitude) evoked.plot_image(exclude=[], time_unit='s')", "a butterfly plot: # By using exclude=[] bad channels are", "from mne.datasets import sample print(__doc__) data_path = sample.data_path() fname =", "+ '/MEG/sample/sample_audvis-ave.fif' # Reading condition = 'Left Auditory' evoked =", "import sample print(__doc__) data_path = sample.data_path() fname = data_path +", "proj=True) ############################################################################### # Show result as a butterfly plot: #", "sample.data_path() fname = data_path + '/MEG/sample/sample_audvis-ave.fif' # Reading condition =", "evoked datasets. \"\"\" # Author: <NAME> <<EMAIL>> # # License:", "print(__doc__) data_path = sample.data_path() fname = data_path + '/MEG/sample/sample_audvis-ave.fif' #", "time_unit='s') ############################################################################### # Use :func:`mne.Evoked.save` or :func:`mne.write_evokeds` to write the", "############################################################################### # Show result as a butterfly plot: # By", "fname = data_path + '/MEG/sample/sample_audvis-ave.fif' # Reading condition = 'Left", "writing an evoked file ================================== This script shows how to", "Reading and writing an evoked file ================================== This script shows", "0), proj=True) ############################################################################### # Show result as a butterfly plot:", "write evoked datasets. \"\"\" # Author: <NAME> <<EMAIL>> # #", "read and write evoked datasets. 
\"\"\" # Author: <NAME> <<EMAIL>>", "evoked.plot_image(exclude=[], time_unit='s') ############################################################################### # Use :func:`mne.Evoked.save` or :func:`mne.write_evokeds` to write", "<NAME> <<EMAIL>> # # License: BSD (3-clause) from mne import", "from mne import read_evokeds from mne.datasets import sample print(__doc__) data_path", "read_evokeds from mne.datasets import sample print(__doc__) data_path = sample.data_path() fname", "are shown in red evoked.plot(exclude=[], time_unit='s') # Show result as", "Show result as a butterfly plot: # By using exclude=[]", "# Show result as a 2D image (x: time, y:", "(x: time, y: channels, color: amplitude) evoked.plot_image(exclude=[], time_unit='s') ############################################################################### #", "butterfly plot: # By using exclude=[] bad channels are not", "shows how to read and write evoked datasets. \"\"\" #", "in red evoked.plot(exclude=[], time_unit='s') # Show result as a 2D", "= data_path + '/MEG/sample/sample_audvis-ave.fif' # Reading condition = 'Left Auditory'", "time, y: channels, color: amplitude) evoked.plot_image(exclude=[], time_unit='s') ############################################################################### # Use", "'Left Auditory' evoked = read_evokeds(fname, condition=condition, baseline=(None, 0), proj=True) ###############################################################################", "import read_evokeds from mne.datasets import sample print(__doc__) data_path = sample.data_path()", "Show result as a 2D image (x: time, y: channels,", "This script shows how to read and write evoked datasets.", "Auditory' evoked = read_evokeds(fname, condition=condition, baseline=(None, 0), proj=True) ############################################################################### #", "y: channels, color: amplitude) evoked.plot_image(exclude=[], time_unit='s') 
############################################################################### # Use :func:`mne.Evoked.save`", "\"\"\" ================================== Reading and writing an evoked file ================================== This", "not excluded and are shown in red evoked.plot(exclude=[], time_unit='s') #", "a 2D image (x: time, y: channels, color: amplitude) evoked.plot_image(exclude=[],", "and write evoked datasets. \"\"\" # Author: <NAME> <<EMAIL>> #", "red evoked.plot(exclude=[], time_unit='s') # Show result as a 2D image", "data_path + '/MEG/sample/sample_audvis-ave.fif' # Reading condition = 'Left Auditory' evoked", "bad channels are not excluded and are shown in red", "to read and write evoked datasets. \"\"\" # Author: <NAME>", "= read_evokeds(fname, condition=condition, baseline=(None, 0), proj=True) ############################################################################### # Show result", "mne import read_evokeds from mne.datasets import sample print(__doc__) data_path =", "channels are not excluded and are shown in red evoked.plot(exclude=[],", "(3-clause) from mne import read_evokeds from mne.datasets import sample print(__doc__)", "# Author: <NAME> <<EMAIL>> # # License: BSD (3-clause) from", "plot: # By using exclude=[] bad channels are not excluded", "read_evokeds(fname, condition=condition, baseline=(None, 0), proj=True) ############################################################################### # Show result as", "############################################################################### # Use :func:`mne.Evoked.save` or :func:`mne.write_evokeds` to write the evoked", "excluded and are shown in red evoked.plot(exclude=[], time_unit='s') # Show", "<filename>examples/io/plot_read_evoked.py \"\"\" ================================== Reading and writing an evoked file ==================================", "# Use :func:`mne.Evoked.save` or :func:`mne.write_evokeds` to write the evoked #", "# # License: BSD (3-clause) from mne import 
read_evokeds from", "shown in red evoked.plot(exclude=[], time_unit='s') # Show result as a", "channels, color: amplitude) evoked.plot_image(exclude=[], time_unit='s') ############################################################################### # Use :func:`mne.Evoked.save` or", "BSD (3-clause) from mne import read_evokeds from mne.datasets import sample", ":func:`mne.Evoked.save` or :func:`mne.write_evokeds` to write the evoked # responses to", ":func:`mne.write_evokeds` to write the evoked # responses to a file.", "baseline=(None, 0), proj=True) ############################################################################### # Show result as a butterfly", "# License: BSD (3-clause) from mne import read_evokeds from mne.datasets", "how to read and write evoked datasets. \"\"\" # Author:", "License: BSD (3-clause) from mne import read_evokeds from mne.datasets import", "# By using exclude=[] bad channels are not excluded and", "datasets. \"\"\" # Author: <NAME> <<EMAIL>> # # License: BSD", "By using exclude=[] bad channels are not excluded and are", "color: amplitude) evoked.plot_image(exclude=[], time_unit='s') ############################################################################### # Use :func:`mne.Evoked.save` or :func:`mne.write_evokeds`", "result as a 2D image (x: time, y: channels, color:", "as a butterfly plot: # By using exclude=[] bad channels", "================================== Reading and writing an evoked file ================================== This script", "evoked file ================================== This script shows how to read and", "are not excluded and are shown in red evoked.plot(exclude=[], time_unit='s')", "# Show result as a butterfly plot: # By using", "as a 2D image (x: time, y: channels, color: amplitude)", "\"\"\" # Author: <NAME> <<EMAIL>> # # License: BSD (3-clause)", "mne.datasets import sample print(__doc__) data_path = sample.data_path() fname = data_path", "script shows how to read and write evoked datasets. 
\"\"\"", "and writing an evoked file ================================== This script shows how", "file ================================== This script shows how to read and write", "an evoked file ================================== This script shows how to read", "<<EMAIL>> # # License: BSD (3-clause) from mne import read_evokeds", "sample print(__doc__) data_path = sample.data_path() fname = data_path + '/MEG/sample/sample_audvis-ave.fif'", "Reading condition = 'Left Auditory' evoked = read_evokeds(fname, condition=condition, baseline=(None,", "condition = 'Left Auditory' evoked = read_evokeds(fname, condition=condition, baseline=(None, 0),", "'/MEG/sample/sample_audvis-ave.fif' # Reading condition = 'Left Auditory' evoked = read_evokeds(fname,", "result as a butterfly plot: # By using exclude=[] bad", "using exclude=[] bad channels are not excluded and are shown", "evoked = read_evokeds(fname, condition=condition, baseline=(None, 0), proj=True) ############################################################################### # Show", "amplitude) evoked.plot_image(exclude=[], time_unit='s') ############################################################################### # Use :func:`mne.Evoked.save` or :func:`mne.write_evokeds` to", "evoked.plot(exclude=[], time_unit='s') # Show result as a 2D image (x:", "Use :func:`mne.Evoked.save` or :func:`mne.write_evokeds` to write the evoked # responses", "or :func:`mne.write_evokeds` to write the evoked # responses to a", "= sample.data_path() fname = data_path + '/MEG/sample/sample_audvis-ave.fif' # Reading condition", "data_path = sample.data_path() fname = data_path + '/MEG/sample/sample_audvis-ave.fif' # Reading", "= 'Left Auditory' evoked = read_evokeds(fname, condition=condition, baseline=(None, 0), proj=True)", "and are shown in red evoked.plot(exclude=[], time_unit='s') # Show result", "================================== This script shows how to read and write evoked", "condition=condition, baseline=(None, 0), 
proj=True) ############################################################################### # Show result as a", "image (x: time, y: channels, color: amplitude) evoked.plot_image(exclude=[], time_unit='s') ###############################################################################", "exclude=[] bad channels are not excluded and are shown in", "Author: <NAME> <<EMAIL>> # # License: BSD (3-clause) from mne", "# Reading condition = 'Left Auditory' evoked = read_evokeds(fname, condition=condition,", "time_unit='s') # Show result as a 2D image (x: time," ]
[ "Copyright (C) 2021 NV Access Limited # This file is", "applyWxMonkeyPatches = wxMonkeyPatches.apply def applyMonkeyPatches(): # Apply several monkey patches", "- imported but unused: Patches are applied during import from", "the GNU General Public License. # See the file COPYING", ". import wxMonkeyPatches applyWxMonkeyPatches = wxMonkeyPatches.apply def applyMonkeyPatches(): # Apply", "more details. from . import wxMonkeyPatches applyWxMonkeyPatches = wxMonkeyPatches.apply def", "details. from . import wxMonkeyPatches applyWxMonkeyPatches = wxMonkeyPatches.apply def applyMonkeyPatches():", "# This file is covered by the GNU General Public", "This file is covered by the GNU General Public License.", "= wxMonkeyPatches.apply def applyMonkeyPatches(): # Apply several monkey patches to", "applied during import from . import comtypesMonkeyPatches # noqa: F401", "NV Access Limited # This file is covered by the", "COPYING for more details. from . import wxMonkeyPatches applyWxMonkeyPatches =", "patches to Enum, prevent cyclic references on ValueError during construction", "General Public License. # See the file COPYING for more", "during import from . import comtypesMonkeyPatches # noqa: F401 #", "of NonVisual Desktop Access (NVDA) # Copyright (C) 2021 NV", "F401 # Apply patches to Enum, prevent cyclic references on", "import wxMonkeyPatches applyWxMonkeyPatches = wxMonkeyPatches.apply def applyMonkeyPatches(): # Apply several", "cyclic references on ValueError during construction from . import enumPatches", ". import comtypesMonkeyPatches # noqa: F401 # Apply patches to", "See the file COPYING for more details. from . import", "is covered by the GNU General Public License. 
# See", "applyMonkeyPatches(): # Apply several monkey patches to comtypes # F401", "several monkey patches to comtypes # F401 - imported but", "to Enum, prevent cyclic references on ValueError during construction from", "# F401 - imported but unused: Patches are applied during", "but unused: Patches are applied during import from . import", "from . import wxMonkeyPatches applyWxMonkeyPatches = wxMonkeyPatches.apply def applyMonkeyPatches(): #", "wxMonkeyPatches applyWxMonkeyPatches = wxMonkeyPatches.apply def applyMonkeyPatches(): # Apply several monkey", "to comtypes # F401 - imported but unused: Patches are", "Apply patches to Enum, prevent cyclic references on ValueError during", "patches to comtypes # F401 - imported but unused: Patches", "Public License. # See the file COPYING for more details.", "(C) 2021 NV Access Limited # This file is covered", "imported but unused: Patches are applied during import from .", "the file COPYING for more details. from . import wxMonkeyPatches", "covered by the GNU General Public License. # See the", "comtypes # F401 - imported but unused: Patches are applied", "# Apply several monkey patches to comtypes # F401 -", "GNU General Public License. # See the file COPYING for", "Patches are applied during import from . import comtypesMonkeyPatches #", "monkey patches to comtypes # F401 - imported but unused:", "F401 - imported but unused: Patches are applied during import", "2021 NV Access Limited # This file is covered by", "file is covered by the GNU General Public License. #", "by the GNU General Public License. # See the file", "for more details. from . import wxMonkeyPatches applyWxMonkeyPatches = wxMonkeyPatches.apply", "Access (NVDA) # Copyright (C) 2021 NV Access Limited #", "file COPYING for more details. from . import wxMonkeyPatches applyWxMonkeyPatches", "NonVisual Desktop Access (NVDA) # Copyright (C) 2021 NV Access", "references on ValueError during construction from . 
import enumPatches enumPatches.replace__new__()", "(NVDA) # Copyright (C) 2021 NV Access Limited # This", "Access Limited # This file is covered by the GNU", "Apply several monkey patches to comtypes # F401 - imported", "def applyMonkeyPatches(): # Apply several monkey patches to comtypes #", "prevent cyclic references on ValueError during construction from . import", "Limited # This file is covered by the GNU General", "noqa: F401 # Apply patches to Enum, prevent cyclic references", "# A part of NonVisual Desktop Access (NVDA) # Copyright", "unused: Patches are applied during import from . import comtypesMonkeyPatches", "wxMonkeyPatches.apply def applyMonkeyPatches(): # Apply several monkey patches to comtypes", "from . import comtypesMonkeyPatches # noqa: F401 # Apply patches", "# See the file COPYING for more details. from .", "are applied during import from . import comtypesMonkeyPatches # noqa:", "Enum, prevent cyclic references on ValueError during construction from .", "Desktop Access (NVDA) # Copyright (C) 2021 NV Access Limited", "# Apply patches to Enum, prevent cyclic references on ValueError", "# Copyright (C) 2021 NV Access Limited # This file", "comtypesMonkeyPatches # noqa: F401 # Apply patches to Enum, prevent", "# noqa: F401 # Apply patches to Enum, prevent cyclic", "A part of NonVisual Desktop Access (NVDA) # Copyright (C)", "import from . import comtypesMonkeyPatches # noqa: F401 # Apply", "part of NonVisual Desktop Access (NVDA) # Copyright (C) 2021", "import comtypesMonkeyPatches # noqa: F401 # Apply patches to Enum,", "License. # See the file COPYING for more details. from", "<filename>source/monkeyPatches/__init__.py # A part of NonVisual Desktop Access (NVDA) #" ]
[ "potential for a player\\'s strike', 'description': 'Is a boycott of", "'url': 'https://www.9now.com.au/afl-footy-show/2016/clip-ciql02091000g0hp5oktrnytc', 'md5': '17cf47d63ec9323e562c9957a968b565', 'info_dict': { 'id': '16801', 'ext': 'mp4',", "video_id, 'title': title, 'description': try_get(common_data, lambda x: x['episode']['description'], compat_str), 'duration':", "lambda x: x['episode']['video']['id'])) or brightcove_id title = try_get(common_data, lambda x:", "display_id), display_id) for kind in ('episode', 'clip'): current_key = page_data.get(kind,", "elimination of the competition, teams will have 10 hours to", "lambda x: x['episode']['video']['drm'], bool): self.report_drm(display_id) brightcove_id = try_get( common_data, lambda", "webpage = self._download_webpage(url, display_id) page_data = self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*({.*?});', webpage, 'page", "kind in ('episode', 'clip'): current_key = page_data.get(kind, {}).get( 'current%sKey' %", "try_get(common_data, lambda x: x['episode']['description'], compat_str), 'duration': float_or_none(try_get(common_data, lambda x: x['episode']['video']['duration'],", "'url': 'https://www.9now.com.au/afl-footy-show/2016/episode-19', 'only_matching': True, }, { # DRM protected 'url':", "and try_get(common_data, lambda x: x['episode']['video']['drm'], bool): self.report_drm(display_id) brightcove_id = try_get(", "x['episode']['video']['duration'], float), 1000), 'thumbnails': thumbnails, 'ie_key': 'BrightcoveNew', 'season_number': season_number, 'episode_number':", "will have 10 hours to build a world inside a", "{'geo_countries': self._GEO_COUNTRIES}), 'id': video_id, 'title': title, 'description': try_get(common_data, lambda x:", "'description': try_get(common_data, lambda x: x['episode']['description'], compat_str), 'duration': float_or_none(try_get(common_data, lambda x:", "page_data = self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*({.*?});', webpage, 'page 
data', default='{}'), display_id, fatal=False)", "_GEO_COUNTRIES = ['AU'] _TESTS = [{ # clip 'url': 'https://www.9now.com.au/afl-footy-show/2016/clip-ciql02091000g0hp5oktrnytc',", "break else: raise ExtractorError('Unable to find video data') if not", "webpage, 'page data', default='{}'), display_id, fatal=False) if not page_data: page_data", "{}) if not cache: continue common_data = { 'episode': (cache.get(current_key)", "'uploader_id': '4460760524001', 'timestamp': 1619002200, 'upload_date': '20210421', }, 'expected_warnings': ['Ignoring subtitle", "if not current_key: continue cache = page_data.get(kind, {}).get('%sCache' % kind,", "} break else: raise ExtractorError('Unable to find video data') if", "try_get, unified_strdate, unified_timestamp, ) class NineNowIE(InfoExtractor): IE_NAME = '9now.com.au' _VALID_URL", "title = try_get(common_data, lambda x: x['episode']['name'], compat_str) season_number = try_get(common_data,", "x: x['episode']['video']['drm'], bool): self.report_drm(display_id) brightcove_id = try_get( common_data, lambda x:", "display_id, fatal=False) if not page_data: page_data = self._parse_json(self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*JSON\\.parse\\s*\\(\\s*(\".+?\")\\s*\\)\\s*;', webpage,", "smuggle_url, str_or_none, try_get, unified_strdate, unified_timestamp, ) class NineNowIE(InfoExtractor): IE_NAME =", "}, 'expected_warnings': ['Ignoring subtitle tracks'], 'params':{ 'skip_download': True, } }]", "unified_strdate, unified_timestamp, ) class NineNowIE(InfoExtractor): IE_NAME = '9now.com.au' _VALID_URL =", "'page data', default='{}'), display_id, fatal=False) if not page_data: page_data =", "common_data, lambda x: x['episode']['video']['brightcoveId'], compat_str) or 'ref:%s' % common_data['episode']['video']['referenceId'] video_id", "3, 'description': 'In the first elimination of the competition, teams", "x: x['episode']['episodeNumber'], int) timestamp = unified_timestamp(try_get(common_data, lambda x: 
x['episode']['airDate'], compat_str))", "= ['AU'] _TESTS = [{ # clip 'url': 'https://www.9now.com.au/afl-footy-show/2016/clip-ciql02091000g0hp5oktrnytc', 'md5':", "'title': '<NAME>\\'s Joey Montagna on the potential for a player\\'s", "'upload_date': '20160713', 'timestamp': 1468421266, }, 'skip': 'Only available in Australia',", "'https://www.9now.com.au/afl-footy-show/2016/episode-19', 'only_matching': True, }, { # DRM protected 'url': 'https://www.9now.com.au/andrew-marrs-history-of-the-world/season-1/episode-1',", "episode of series 'url': 'https://www.9now.com.au/lego-masters/season-3/episode-3', 'info_dict': { 'id': '6249614030001', 'title':", "float), 1000), 'thumbnails': thumbnails, 'ie_key': 'BrightcoveNew', 'season_number': season_number, 'episode_number': episode_number,", "DRM protected 'url': 'https://www.9now.com.au/andrew-marrs-history-of-the-world/season-1/episode-1', 'only_matching': True, }, { # episode", "for kind in ('episode', 'clip'): current_key = page_data.get(kind, {}).get( 'current%sKey'", "x['episode']['name'], compat_str) season_number = try_get(common_data, lambda x: x['season']['seasonNumber'], int) episode_number", "lambda x: x['episode']['episodeNumber'], int) timestamp = unified_timestamp(try_get(common_data, lambda x: x['episode']['airDate'],", "if not page_data: page_data = self._parse_json(self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*JSON\\.parse\\s*\\(\\s*(\".+?\")\\s*\\)\\s*;', webpage, 'page data'),", "True, } }] BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/4460760524001/default_default/index.html?videoId=%s' def _real_extract(self, url): display_id", "x['episode']['image']['sizes'], dict) or {} thumbnails = [{ 'id': thumbnail_id, 'url':", "world inside a snow globe.', 'uploader_id': '4460760524001', 'timestamp': 1619002200, 'upload_date':", "'page data'), display_id), display_id) for kind in ('episode', 'clip'): current_key", "data', default='{}'), display_id, fatal=False) if not page_data: 
page_data = self._parse_json(self._parse_json(self._search_regex(", "str_or_none, try_get, unified_strdate, unified_timestamp, ) class NineNowIE(InfoExtractor): IE_NAME = '9now.com.au'", "strike', 'description': 'Is a boycott of the NAB Cup \"on", "self._GEO_COUNTRIES}), 'id': video_id, 'title': title, 'description': try_get(common_data, lambda x: x['episode']['description'],", "x['episode']['video']['brightcoveId'], compat_str) or 'ref:%s' % common_data['episode']['video']['referenceId'] video_id = str_or_none(try_get(common_data, lambda", "from ..utils import ( ExtractorError, int_or_none, float_or_none, smuggle_url, str_or_none, try_get,", "import ( ExtractorError, int_or_none, float_or_none, smuggle_url, str_or_none, try_get, unified_strdate, unified_timestamp,", "r'window\\.__data\\s*=\\s*JSON\\.parse\\s*\\(\\s*(\".+?\")\\s*\\)\\s*;', webpage, 'page data'), display_id), display_id) for kind in ('episode',", "'20210421', }, 'expected_warnings': ['Ignoring subtitle tracks'], 'params':{ 'skip_download': True, }", "for thumbnail_id, thumbnail_url in thumbnails_data.items()] return { '_type': 'url_transparent', 'url':", "bool): self.report_drm(display_id) brightcove_id = try_get( common_data, lambda x: x['episode']['video']['brightcoveId'], compat_str)", "or list(cache.values())[0])[kind], 'season': (cache.get(current_key) or list(cache.values())[0]).get('season', None) } break else:", "unified_strdate(try_get(common_data, lambda x: x['episode']['availability'], compat_str)) thumbnails_data = try_get(common_data, lambda x:", "{ # DRM protected 'url': 'https://www.9now.com.au/andrew-marrs-history-of-the-world/season-1/episode-1', 'only_matching': True, }, {", "thumbnails = [{ 'id': thumbnail_id, 'url': thumbnail_url, 'width': int_or_none(thumbnail_id[1:]), }", "kind, {}) if not cache: continue common_data = { 'episode':", "the table\"?', 'uploader_id': '4460760524001', 'upload_date': '20160713', 'timestamp': 1468421266, }, 'skip':", "have 10 hours to build a world 
inside a snow", "x['episode']['airDate'], compat_str)) release_date = unified_strdate(try_get(common_data, lambda x: x['episode']['availability'], compat_str)) thumbnails_data", "the NAB Cup \"on the table\"?', 'uploader_id': '4460760524001', 'upload_date': '20160713',", "a player\\'s strike', 'description': 'Is a boycott of the NAB", "'url': 'https://www.9now.com.au/andrew-marrs-history-of-the-world/season-1/episode-1', 'only_matching': True, }, { # episode of series", "'episode': (cache.get(current_key) or list(cache.values())[0])[kind], 'season': (cache.get(current_key) or list(cache.values())[0]).get('season', None) }", "'md5': '17cf47d63ec9323e562c9957a968b565', 'info_dict': { 'id': '16801', 'ext': 'mp4', 'title': '<NAME>\\'s", "x: x['episode']['name'], compat_str) season_number = try_get(common_data, lambda x: x['season']['seasonNumber'], int)", "data') if not self.get_param('allow_unplayable_formats') and try_get(common_data, lambda x: x['episode']['video']['drm'], bool):", "inside a snow globe.', 'uploader_id': '4460760524001', 'timestamp': 1619002200, 'upload_date': '20210421',", "tracks'], 'params':{ 'skip_download': True, } }] BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/4460760524001/default_default/index.html?videoId=%s' def", "a boycott of the NAB Cup \"on the table\"?', 'uploader_id':", "common_data = { 'episode': (cache.get(current_key) or list(cache.values())[0])[kind], 'season': (cache.get(current_key) or", "} }] BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/4460760524001/default_default/index.html?videoId=%s' def _real_extract(self, url): display_id =", "episode 'url': 'https://www.9now.com.au/afl-footy-show/2016/episode-19', 'only_matching': True, }, { # DRM protected", "cache: continue common_data = { 'episode': (cache.get(current_key) or list(cache.values())[0])[kind], 'season':", "= r'https?://(?:www\\.)?9now\\.com\\.au/(?:[^/]+/){2}(?P<id>[^/?#]+)' _GEO_COUNTRIES = ['AU'] _TESTS = [{ # clip", "'url_transparent', 'url': 
smuggle_url( self.BRIGHTCOVE_URL_TEMPLATE % brightcove_id, {'geo_countries': self._GEO_COUNTRIES}), 'id': video_id,", "3', 'ext': 'mp4', 'season_number': 3, 'episode_number': 3, 'description': 'In the", "compat_str), 'duration': float_or_none(try_get(common_data, lambda x: x['episode']['video']['duration'], float), 1000), 'thumbnails': thumbnails,", ".common import InfoExtractor from ..compat import compat_str from ..utils import", "['Ignoring subtitle tracks'], 'params':{ 'skip_download': True, } }] BRIGHTCOVE_URL_TEMPLATE =", "brightcove_id = try_get( common_data, lambda x: x['episode']['video']['brightcoveId'], compat_str) or 'ref:%s'", "current_key: continue cache = page_data.get(kind, {}).get('%sCache' % kind, {}) if", "x: x['episode']['image']['sizes'], dict) or {} thumbnails = [{ 'id': thumbnail_id,", "not cache: continue common_data = { 'episode': (cache.get(current_key) or list(cache.values())[0])[kind],", "self.get_param('allow_unplayable_formats') and try_get(common_data, lambda x: x['episode']['video']['drm'], bool): self.report_drm(display_id) brightcove_id =", "hours to build a world inside a snow globe.', 'uploader_id':", "{} thumbnails = [{ 'id': thumbnail_id, 'url': thumbnail_url, 'width': int_or_none(thumbnail_id[1:]),", "'Only available in Australia', }, { # episode 'url': 'https://www.9now.com.au/afl-footy-show/2016/episode-19',", "the competition, teams will have 10 hours to build a", "1000), 'thumbnails': thumbnails, 'ie_key': 'BrightcoveNew', 'season_number': season_number, 'episode_number': episode_number, 'timestamp':", "'In the first elimination of the competition, teams will have", "x: x['episode']['video']['duration'], float), 1000), 'thumbnails': thumbnails, 'ie_key': 'BrightcoveNew', 'season_number': season_number,", "'ext': 'mp4', 'title': '<NAME>\\'s Joey Montagna on the potential for", "True, }, { # DRM protected 'url': 'https://www.9now.com.au/andrew-marrs-history-of-the-world/season-1/episode-1', 'only_matching': True,", "of the 
NAB Cup \"on the table\"?', 'uploader_id': '4460760524001', 'upload_date':", "int_or_none(thumbnail_id[1:]), } for thumbnail_id, thumbnail_url in thumbnails_data.items()] return { '_type':", "page_data: page_data = self._parse_json(self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*JSON\\.parse\\s*\\(\\s*(\".+?\")\\s*\\)\\s*;', webpage, 'page data'), display_id), display_id)", "'skip': 'Only available in Australia', }, { # episode 'url':", "'url': 'https://www.9now.com.au/lego-masters/season-3/episode-3', 'info_dict': { 'id': '6249614030001', 'title': 'Episode 3', 'ext':", "'id': '6249614030001', 'title': 'Episode 3', 'ext': 'mp4', 'season_number': 3, 'episode_number':", "import compat_str from ..utils import ( ExtractorError, int_or_none, float_or_none, smuggle_url,", "'season_number': 3, 'episode_number': 3, 'description': 'In the first elimination of", "class NineNowIE(InfoExtractor): IE_NAME = '9now.com.au' _VALID_URL = r'https?://(?:www\\.)?9now\\.com\\.au/(?:[^/]+/){2}(?P<id>[^/?#]+)' _GEO_COUNTRIES =", "None) } break else: raise ExtractorError('Unable to find video data')", "list(cache.values())[0])[kind], 'season': (cache.get(current_key) or list(cache.values())[0]).get('season', None) } break else: raise", "'mp4', 'title': '<NAME>\\'s Joey Montagna on the potential for a", "_VALID_URL = r'https?://(?:www\\.)?9now\\.com\\.au/(?:[^/]+/){2}(?P<id>[^/?#]+)' _GEO_COUNTRIES = ['AU'] _TESTS = [{ #", "x['episode']['description'], compat_str), 'duration': float_or_none(try_get(common_data, lambda x: x['episode']['video']['duration'], float), 1000), 'thumbnails':", "current_key = page_data.get(kind, {}).get( 'current%sKey' % kind.capitalize()) if not current_key:", "cache = page_data.get(kind, {}).get('%sCache' % kind, {}) if not cache:", "= page_data.get(kind, {}).get( 'current%sKey' % kind.capitalize()) if not current_key: continue", "unified_timestamp(try_get(common_data, lambda x: x['episode']['airDate'], compat_str)) release_date = 
unified_strdate(try_get(common_data, lambda x:", "x: x['episode']['description'], compat_str), 'duration': float_or_none(try_get(common_data, lambda x: x['episode']['video']['duration'], float), 1000),", "video_id = str_or_none(try_get(common_data, lambda x: x['episode']['video']['id'])) or brightcove_id title =", "[{ 'id': thumbnail_id, 'url': thumbnail_url, 'width': int_or_none(thumbnail_id[1:]), } for thumbnail_id,", "= try_get(common_data, lambda x: x['episode']['name'], compat_str) season_number = try_get(common_data, lambda", "if not self.get_param('allow_unplayable_formats') and try_get(common_data, lambda x: x['episode']['video']['drm'], bool): self.report_drm(display_id)", "'info_dict': { 'id': '6249614030001', 'title': 'Episode 3', 'ext': 'mp4', 'season_number':", "'https://www.9now.com.au/andrew-marrs-history-of-the-world/season-1/episode-1', 'only_matching': True, }, { # episode of series 'url':", "'Is a boycott of the NAB Cup \"on the table\"?',", "dict) or {} thumbnails = [{ 'id': thumbnail_id, 'url': thumbnail_url,", "'description': 'Is a boycott of the NAB Cup \"on the", "x: x['episode']['availability'], compat_str)) thumbnails_data = try_get(common_data, lambda x: x['episode']['image']['sizes'], dict)", "= self._parse_json(self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*JSON\\.parse\\s*\\(\\s*(\".+?\")\\s*\\)\\s*;', webpage, 'page data'), display_id), display_id) for kind", "'20160713', 'timestamp': 1468421266, }, 'skip': 'Only available in Australia', },", "player\\'s strike', 'description': 'Is a boycott of the NAB Cup", "'ref:%s' % common_data['episode']['video']['referenceId'] video_id = str_or_none(try_get(common_data, lambda x: x['episode']['video']['id'])) or", "'title': 'Episode 3', 'ext': 'mp4', 'season_number': 3, 'episode_number': 3, 'description':", "lambda x: x['episode']['video']['duration'], float), 1000), 'thumbnails': thumbnails, 'ie_key': 'BrightcoveNew', 'season_number':", 
"self._parse_json(self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*JSON\\.parse\\s*\\(\\s*(\".+?\")\\s*\\)\\s*;', webpage, 'page data'), display_id), display_id) for kind in", "% kind.capitalize()) if not current_key: continue cache = page_data.get(kind, {}).get('%sCache'", "{ 'id': '6249614030001', 'title': 'Episode 3', 'ext': 'mp4', 'season_number': 3,", "= try_get( common_data, lambda x: x['episode']['video']['brightcoveId'], compat_str) or 'ref:%s' %", "a snow globe.', 'uploader_id': '4460760524001', 'timestamp': 1619002200, 'upload_date': '20210421', },", "for a player\\'s strike', 'description': 'Is a boycott of the", "Cup \"on the table\"?', 'uploader_id': '4460760524001', 'upload_date': '20160713', 'timestamp': 1468421266,", "thumbnails, 'ie_key': 'BrightcoveNew', 'season_number': season_number, 'episode_number': episode_number, 'timestamp': timestamp, 'release_date':", "else: raise ExtractorError('Unable to find video data') if not self.get_param('allow_unplayable_formats')", "thumbnails_data.items()] return { '_type': 'url_transparent', 'url': smuggle_url( self.BRIGHTCOVE_URL_TEMPLATE % brightcove_id,", "lambda x: x['episode']['airDate'], compat_str)) release_date = unified_strdate(try_get(common_data, lambda x: x['episode']['availability'],", "Montagna on the potential for a player\\'s strike', 'description': 'Is", "thumbnail_url, 'width': int_or_none(thumbnail_id[1:]), } for thumbnail_id, thumbnail_url in thumbnails_data.items()] return", "compat_str from ..utils import ( ExtractorError, int_or_none, float_or_none, smuggle_url, str_or_none,", "thumbnail_url in thumbnails_data.items()] return { '_type': 'url_transparent', 'url': smuggle_url( self.BRIGHTCOVE_URL_TEMPLATE", "continue common_data = { 'episode': (cache.get(current_key) or list(cache.values())[0])[kind], 'season': (cache.get(current_key)", "'id': thumbnail_id, 'url': thumbnail_url, 'width': int_or_none(thumbnail_id[1:]), } for thumbnail_id, thumbnail_url", "episode_number = 
try_get(common_data, lambda x: x['episode']['episodeNumber'], int) timestamp = unified_timestamp(try_get(common_data,", "BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/4460760524001/default_default/index.html?videoId=%s' def _real_extract(self, url): display_id = self._match_id(url) webpage", "= 'http://players.brightcove.net/4460760524001/default_default/index.html?videoId=%s' def _real_extract(self, url): display_id = self._match_id(url) webpage =", "[{ # clip 'url': 'https://www.9now.com.au/afl-footy-show/2016/clip-ciql02091000g0hp5oktrnytc', 'md5': '17cf47d63ec9323e562c9957a968b565', 'info_dict': { 'id':", "'6249614030001', 'title': 'Episode 3', 'ext': 'mp4', 'season_number': 3, 'episode_number': 3,", "a world inside a snow globe.', 'uploader_id': '4460760524001', 'timestamp': 1619002200,", "raise ExtractorError('Unable to find video data') if not self.get_param('allow_unplayable_formats') and", "'mp4', 'season_number': 3, 'episode_number': 3, 'description': 'In the first elimination", "of series 'url': 'https://www.9now.com.au/lego-masters/season-3/episode-3', 'info_dict': { 'id': '6249614030001', 'title': 'Episode", "'current%sKey' % kind.capitalize()) if not current_key: continue cache = page_data.get(kind,", "season_number = try_get(common_data, lambda x: x['season']['seasonNumber'], int) episode_number = try_get(common_data,", "% brightcove_id, {'geo_countries': self._GEO_COUNTRIES}), 'id': video_id, 'title': title, 'description': try_get(common_data,", "lambda x: x['episode']['description'], compat_str), 'duration': float_or_none(try_get(common_data, lambda x: x['episode']['video']['duration'], float),", "'id': '16801', 'ext': 'mp4', 'title': '<NAME>\\'s Joey Montagna on the", "= try_get(common_data, lambda x: x['episode']['episodeNumber'], int) timestamp = unified_timestamp(try_get(common_data, lambda", "'title': title, 'description': try_get(common_data, lambda x: x['episode']['description'], compat_str), 'duration': 
float_or_none(try_get(common_data,", "lambda x: x['episode']['name'], compat_str) season_number = try_get(common_data, lambda x: x['season']['seasonNumber'],", "display_id = self._match_id(url) webpage = self._download_webpage(url, display_id) page_data = self._parse_json(self._search_regex(", "self._download_webpage(url, display_id) page_data = self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*({.*?});', webpage, 'page data', default='{}'),", "brightcove_id, {'geo_countries': self._GEO_COUNTRIES}), 'id': video_id, 'title': title, 'description': try_get(common_data, lambda", "'uploader_id': '4460760524001', 'upload_date': '20160713', 'timestamp': 1468421266, }, 'skip': 'Only available", "('episode', 'clip'): current_key = page_data.get(kind, {}).get( 'current%sKey' % kind.capitalize()) if", "boycott of the NAB Cup \"on the table\"?', 'uploader_id': '4460760524001',", "not current_key: continue cache = page_data.get(kind, {}).get('%sCache' % kind, {})", "% kind, {}) if not cache: continue common_data = {", "'BrightcoveNew', 'season_number': season_number, 'episode_number': episode_number, 'timestamp': timestamp, 'release_date': release_date, }", "to find video data') if not self.get_param('allow_unplayable_formats') and try_get(common_data, lambda", "= unified_strdate(try_get(common_data, lambda x: x['episode']['availability'], compat_str)) thumbnails_data = try_get(common_data, lambda", "subtitle tracks'], 'params':{ 'skip_download': True, } }] BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/4460760524001/default_default/index.html?videoId=%s'", "fatal=False) if not page_data: page_data = self._parse_json(self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*JSON\\.parse\\s*\\(\\s*(\".+?\")\\s*\\)\\s*;', webpage, 'page", "..utils import ( ExtractorError, int_or_none, float_or_none, smuggle_url, str_or_none, try_get, unified_strdate,", "display_id) for kind in ('episode', 'clip'): current_key = page_data.get(kind, {}).get(", 
"r'window\\.__data\\s*=\\s*({.*?});', webpage, 'page data', default='{}'), display_id, fatal=False) if not page_data:", "try_get(common_data, lambda x: x['season']['seasonNumber'], int) episode_number = try_get(common_data, lambda x:", "['AU'] _TESTS = [{ # clip 'url': 'https://www.9now.com.au/afl-footy-show/2016/clip-ciql02091000g0hp5oktrnytc', 'md5': '17cf47d63ec9323e562c9957a968b565',", "try_get(common_data, lambda x: x['episode']['episodeNumber'], int) timestamp = unified_timestamp(try_get(common_data, lambda x:", "not self.get_param('allow_unplayable_formats') and try_get(common_data, lambda x: x['episode']['video']['drm'], bool): self.report_drm(display_id) brightcove_id", "try_get(common_data, lambda x: x['episode']['name'], compat_str) season_number = try_get(common_data, lambda x:", "compat_str) season_number = try_get(common_data, lambda x: x['season']['seasonNumber'], int) episode_number =", "'episode_number': 3, 'description': 'In the first elimination of the competition,", "% common_data['episode']['video']['referenceId'] video_id = str_or_none(try_get(common_data, lambda x: x['episode']['video']['id'])) or brightcove_id", "'upload_date': '20210421', }, 'expected_warnings': ['Ignoring subtitle tracks'], 'params':{ 'skip_download': True,", "def _real_extract(self, url): display_id = self._match_id(url) webpage = self._download_webpage(url, display_id)", "try_get(common_data, lambda x: x['episode']['video']['drm'], bool): self.report_drm(display_id) brightcove_id = try_get( common_data,", "= unified_timestamp(try_get(common_data, lambda x: x['episode']['airDate'], compat_str)) release_date = unified_strdate(try_get(common_data, lambda", "or list(cache.values())[0]).get('season', None) } break else: raise ExtractorError('Unable to find", "import InfoExtractor from ..compat import compat_str from ..utils import (", "'Episode 3', 'ext': 'mp4', 'season_number': 3, 'episode_number': 3, 'description': 'In", "'timestamp': 1468421266, }, 'skip': 'Only available 
in Australia', }, {", "thumbnail_id, 'url': thumbnail_url, 'width': int_or_none(thumbnail_id[1:]), } for thumbnail_id, thumbnail_url in", "list(cache.values())[0]).get('season', None) } break else: raise ExtractorError('Unable to find video", "'http://players.brightcove.net/4460760524001/default_default/index.html?videoId=%s' def _real_extract(self, url): display_id = self._match_id(url) webpage = self._download_webpage(url,", "series 'url': 'https://www.9now.com.au/lego-masters/season-3/episode-3', 'info_dict': { 'id': '6249614030001', 'title': 'Episode 3',", "data'), display_id), display_id) for kind in ('episode', 'clip'): current_key =", "'4460760524001', 'timestamp': 1619002200, 'upload_date': '20210421', }, 'expected_warnings': ['Ignoring subtitle tracks'],", "Australia', }, { # episode 'url': 'https://www.9now.com.au/afl-footy-show/2016/episode-19', 'only_matching': True, },", "in ('episode', 'clip'): current_key = page_data.get(kind, {}).get( 'current%sKey' % kind.capitalize())", "1468421266, }, 'skip': 'Only available in Australia', }, { #", "'_type': 'url_transparent', 'url': smuggle_url( self.BRIGHTCOVE_URL_TEMPLATE % brightcove_id, {'geo_countries': self._GEO_COUNTRIES}), 'id':", "'timestamp': 1619002200, 'upload_date': '20210421', }, 'expected_warnings': ['Ignoring subtitle tracks'], 'params':{", "int_or_none, float_or_none, smuggle_url, str_or_none, try_get, unified_strdate, unified_timestamp, ) class NineNowIE(InfoExtractor):", "timestamp = unified_timestamp(try_get(common_data, lambda x: x['episode']['airDate'], compat_str)) release_date = unified_strdate(try_get(common_data,", "Joey Montagna on the potential for a player\\'s strike', 'description':", "x['episode']['availability'], compat_str)) thumbnails_data = try_get(common_data, lambda x: x['episode']['image']['sizes'], dict) or", "= self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*({.*?});', webpage, 'page data', default='{}'), display_id, fatal=False) if", "'thumbnails': 
thumbnails, 'ie_key': 'BrightcoveNew', 'season_number': season_number, 'episode_number': episode_number, 'timestamp': timestamp,", "thumbnail_id, thumbnail_url in thumbnails_data.items()] return { '_type': 'url_transparent', 'url': smuggle_url(", "self._match_id(url) webpage = self._download_webpage(url, display_id) page_data = self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*({.*?});', webpage,", "'url': thumbnail_url, 'width': int_or_none(thumbnail_id[1:]), } for thumbnail_id, thumbnail_url in thumbnails_data.items()]", "r'https?://(?:www\\.)?9now\\.com\\.au/(?:[^/]+/){2}(?P<id>[^/?#]+)' _GEO_COUNTRIES = ['AU'] _TESTS = [{ # clip 'url':", "find video data') if not self.get_param('allow_unplayable_formats') and try_get(common_data, lambda x:", "'only_matching': True, }, { # DRM protected 'url': 'https://www.9now.com.au/andrew-marrs-history-of-the-world/season-1/episode-1', 'only_matching':", "'https://www.9now.com.au/lego-masters/season-3/episode-3', 'info_dict': { 'id': '6249614030001', 'title': 'Episode 3', 'ext': 'mp4',", "x['season']['seasonNumber'], int) episode_number = try_get(common_data, lambda x: x['episode']['episodeNumber'], int) timestamp", "'only_matching': True, }, { # episode of series 'url': 'https://www.9now.com.au/lego-masters/season-3/episode-3',", "in Australia', }, { # episode 'url': 'https://www.9now.com.au/afl-footy-show/2016/episode-19', 'only_matching': True,", "} for thumbnail_id, thumbnail_url in thumbnails_data.items()] return { '_type': 'url_transparent',", "int) timestamp = unified_timestamp(try_get(common_data, lambda x: x['episode']['airDate'], compat_str)) release_date =", "not page_data: page_data = self._parse_json(self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*JSON\\.parse\\s*\\(\\s*(\".+?\")\\s*\\)\\s*;', webpage, 'page data'), display_id),", "'params':{ 'skip_download': True, } }] BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/4460760524001/default_default/index.html?videoId=%s' def 
_real_extract(self,", "title, 'description': try_get(common_data, lambda x: x['episode']['description'], compat_str), 'duration': float_or_none(try_get(common_data, lambda", "'17cf47d63ec9323e562c9957a968b565', 'info_dict': { 'id': '16801', 'ext': 'mp4', 'title': '<NAME>\\'s Joey", "10 hours to build a world inside a snow globe.',", "page_data.get(kind, {}).get( 'current%sKey' % kind.capitalize()) if not current_key: continue cache", "(cache.get(current_key) or list(cache.values())[0])[kind], 'season': (cache.get(current_key) or list(cache.values())[0]).get('season', None) } break", "3, 'episode_number': 3, 'description': 'In the first elimination of the", "= self._download_webpage(url, display_id) page_data = self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*({.*?});', webpage, 'page data',", "1619002200, 'upload_date': '20210421', }, 'expected_warnings': ['Ignoring subtitle tracks'], 'params':{ 'skip_download':", "or 'ref:%s' % common_data['episode']['video']['referenceId'] video_id = str_or_none(try_get(common_data, lambda x: x['episode']['video']['id']))", "teams will have 10 hours to build a world inside", "{ # episode 'url': 'https://www.9now.com.au/afl-footy-show/2016/episode-19', 'only_matching': True, }, { #", "# episode of series 'url': 'https://www.9now.com.au/lego-masters/season-3/episode-3', 'info_dict': { 'id': '6249614030001',", "on the potential for a player\\'s strike', 'description': 'Is a", "unified_timestamp, ) class NineNowIE(InfoExtractor): IE_NAME = '9now.com.au' _VALID_URL = r'https?://(?:www\\.)?9now\\.com\\.au/(?:[^/]+/){2}(?P<id>[^/?#]+)'", "'ie_key': 'BrightcoveNew', 'season_number': season_number, 'episode_number': episode_number, 'timestamp': timestamp, 'release_date': release_date,", "or {} thumbnails = [{ 'id': thumbnail_id, 'url': thumbnail_url, 'width':", "smuggle_url( self.BRIGHTCOVE_URL_TEMPLATE % brightcove_id, {'geo_countries': self._GEO_COUNTRIES}), 'id': video_id, 'title': title,", "protected 'url': 
'https://www.9now.com.au/andrew-marrs-history-of-the-world/season-1/episode-1', 'only_matching': True, }, { # episode of", "x: x['episode']['video']['id'])) or brightcove_id title = try_get(common_data, lambda x: x['episode']['name'],", "page_data.get(kind, {}).get('%sCache' % kind, {}) if not cache: continue common_data", "= { 'episode': (cache.get(current_key) or list(cache.values())[0])[kind], 'season': (cache.get(current_key) or list(cache.values())[0]).get('season',", "str_or_none(try_get(common_data, lambda x: x['episode']['video']['id'])) or brightcove_id title = try_get(common_data, lambda", "'id': video_id, 'title': title, 'description': try_get(common_data, lambda x: x['episode']['description'], compat_str),", "page_data = self._parse_json(self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*JSON\\.parse\\s*\\(\\s*(\".+?\")\\s*\\)\\s*;', webpage, 'page data'), display_id), display_id) for", "common_data['episode']['video']['referenceId'] video_id = str_or_none(try_get(common_data, lambda x: x['episode']['video']['id'])) or brightcove_id title", "url): display_id = self._match_id(url) webpage = self._download_webpage(url, display_id) page_data =", "lambda x: x['episode']['image']['sizes'], dict) or {} thumbnails = [{ 'id':", "x: x['episode']['video']['brightcoveId'], compat_str) or 'ref:%s' % common_data['episode']['video']['referenceId'] video_id = str_or_none(try_get(common_data,", "to build a world inside a snow globe.', 'uploader_id': '4460760524001',", "compat_str) or 'ref:%s' % common_data['episode']['video']['referenceId'] video_id = str_or_none(try_get(common_data, lambda x:", "'description': 'In the first elimination of the competition, teams will", "NAB Cup \"on the table\"?', 'uploader_id': '4460760524001', 'upload_date': '20160713', 'timestamp':", "the first elimination of the competition, teams will have 10", "'skip_download': True, } }] BRIGHTCOVE_URL_TEMPLATE = 
'http://players.brightcove.net/4460760524001/default_default/index.html?videoId=%s' def _real_extract(self, url):", "{ 'episode': (cache.get(current_key) or list(cache.values())[0])[kind], 'season': (cache.get(current_key) or list(cache.values())[0]).get('season', None)", "float_or_none(try_get(common_data, lambda x: x['episode']['video']['duration'], float), 1000), 'thumbnails': thumbnails, 'ie_key': 'BrightcoveNew',", "}, { # episode of series 'url': 'https://www.9now.com.au/lego-masters/season-3/episode-3', 'info_dict': {", "clip 'url': 'https://www.9now.com.au/afl-footy-show/2016/clip-ciql02091000g0hp5oktrnytc', 'md5': '17cf47d63ec9323e562c9957a968b565', 'info_dict': { 'id': '16801', 'ext':", "lambda x: x['episode']['video']['brightcoveId'], compat_str) or 'ref:%s' % common_data['episode']['video']['referenceId'] video_id =", "kind.capitalize()) if not current_key: continue cache = page_data.get(kind, {}).get('%sCache' %", ") class NineNowIE(InfoExtractor): IE_NAME = '9now.com.au' _VALID_URL = r'https?://(?:www\\.)?9now\\.com\\.au/(?:[^/]+/){2}(?P<id>[^/?#]+)' _GEO_COUNTRIES", "x: x['episode']['airDate'], compat_str)) release_date = unified_strdate(try_get(common_data, lambda x: x['episode']['availability'], compat_str))", "# clip 'url': 'https://www.9now.com.au/afl-footy-show/2016/clip-ciql02091000g0hp5oktrnytc', 'md5': '17cf47d63ec9323e562c9957a968b565', 'info_dict': { 'id': '16801',", "'info_dict': { 'id': '16801', 'ext': 'mp4', 'title': '<NAME>\\'s Joey Montagna", "return { '_type': 'url_transparent', 'url': smuggle_url( self.BRIGHTCOVE_URL_TEMPLATE % brightcove_id, {'geo_countries':", "_real_extract(self, url): display_id = self._match_id(url) webpage = self._download_webpage(url, display_id) page_data", "NineNowIE(InfoExtractor): IE_NAME = '9now.com.au' _VALID_URL = r'https?://(?:www\\.)?9now\\.com\\.au/(?:[^/]+/){2}(?P<id>[^/?#]+)' _GEO_COUNTRIES = ['AU']", "# DRM protected 'url': 
'https://www.9now.com.au/andrew-marrs-history-of-the-world/season-1/episode-1', 'only_matching': True, }, { #", "}] BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/4460760524001/default_default/index.html?videoId=%s' def _real_extract(self, url): display_id = self._match_id(url)", "<reponame>nxtreaming/yt-dlp<gh_stars>10-100 from .common import InfoExtractor from ..compat import compat_str from", "try_get(common_data, lambda x: x['episode']['image']['sizes'], dict) or {} thumbnails = [{", "float_or_none, smuggle_url, str_or_none, try_get, unified_strdate, unified_timestamp, ) class NineNowIE(InfoExtractor): IE_NAME", "display_id) page_data = self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*({.*?});', webpage, 'page data', default='{}'), display_id,", "the potential for a player\\'s strike', 'description': 'Is a boycott", "(cache.get(current_key) or list(cache.values())[0]).get('season', None) } break else: raise ExtractorError('Unable to", "or brightcove_id title = try_get(common_data, lambda x: x['episode']['name'], compat_str) season_number", "'ext': 'mp4', 'season_number': 3, 'episode_number': 3, 'description': 'In the first", "self.report_drm(display_id) brightcove_id = try_get( common_data, lambda x: x['episode']['video']['brightcoveId'], compat_str) or", "'<NAME>\\'s Joey Montagna on the potential for a player\\'s strike',", "{ # episode of series 'url': 'https://www.9now.com.au/lego-masters/season-3/episode-3', 'info_dict': { 'id':", "int) episode_number = try_get(common_data, lambda x: x['episode']['episodeNumber'], int) timestamp =", "{ '_type': 'url_transparent', 'url': smuggle_url( self.BRIGHTCOVE_URL_TEMPLATE % brightcove_id, {'geo_countries': self._GEO_COUNTRIES}),", "table\"?', 'uploader_id': '4460760524001', 'upload_date': '20160713', 'timestamp': 1468421266, }, 'skip': 'Only", "'expected_warnings': ['Ignoring subtitle tracks'], 'params':{ 'skip_download': True, } }] BRIGHTCOVE_URL_TEMPLATE", "= str_or_none(try_get(common_data, 
lambda x: x['episode']['video']['id'])) or brightcove_id title = try_get(common_data,", "( ExtractorError, int_or_none, float_or_none, smuggle_url, str_or_none, try_get, unified_strdate, unified_timestamp, )", "{ 'id': '16801', 'ext': 'mp4', 'title': '<NAME>\\'s Joey Montagna on", "'width': int_or_none(thumbnail_id[1:]), } for thumbnail_id, thumbnail_url in thumbnails_data.items()] return {", "competition, teams will have 10 hours to build a world", "build a world inside a snow globe.', 'uploader_id': '4460760524001', 'timestamp':", "lambda x: x['episode']['availability'], compat_str)) thumbnails_data = try_get(common_data, lambda x: x['episode']['image']['sizes'],", "ExtractorError, int_or_none, float_or_none, smuggle_url, str_or_none, try_get, unified_strdate, unified_timestamp, ) class", "_TESTS = [{ # clip 'url': 'https://www.9now.com.au/afl-footy-show/2016/clip-ciql02091000g0hp5oktrnytc', 'md5': '17cf47d63ec9323e562c9957a968b565', 'info_dict':", "IE_NAME = '9now.com.au' _VALID_URL = r'https?://(?:www\\.)?9now\\.com\\.au/(?:[^/]+/){2}(?P<id>[^/?#]+)' _GEO_COUNTRIES = ['AU'] _TESTS", "= try_get(common_data, lambda x: x['episode']['image']['sizes'], dict) or {} thumbnails =", "available in Australia', }, { # episode 'url': 'https://www.9now.com.au/afl-footy-show/2016/episode-19', 'only_matching':", "'clip'): current_key = page_data.get(kind, {}).get( 'current%sKey' % kind.capitalize()) if not", "thumbnails_data = try_get(common_data, lambda x: x['episode']['image']['sizes'], dict) or {} thumbnails", "'https://www.9now.com.au/afl-footy-show/2016/clip-ciql02091000g0hp5oktrnytc', 'md5': '17cf47d63ec9323e562c9957a968b565', 'info_dict': { 'id': '16801', 'ext': 'mp4', 'title':", "self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*({.*?});', webpage, 'page data', default='{}'), display_id, fatal=False) if not", "= try_get(common_data, lambda x: x['season']['seasonNumber'], int) episode_number = try_get(common_data, lambda", "x['episode']['video']['id'])) or 
brightcove_id title = try_get(common_data, lambda x: x['episode']['name'], compat_str)", "lambda x: x['season']['seasonNumber'], int) episode_number = try_get(common_data, lambda x: x['episode']['episodeNumber'],", "{}).get( 'current%sKey' % kind.capitalize()) if not current_key: continue cache =", "'url': smuggle_url( self.BRIGHTCOVE_URL_TEMPLATE % brightcove_id, {'geo_countries': self._GEO_COUNTRIES}), 'id': video_id, 'title':", "= [{ # clip 'url': 'https://www.9now.com.au/afl-footy-show/2016/clip-ciql02091000g0hp5oktrnytc', 'md5': '17cf47d63ec9323e562c9957a968b565', 'info_dict': {", "from ..compat import compat_str from ..utils import ( ExtractorError, int_or_none,", "first elimination of the competition, teams will have 10 hours", "InfoExtractor from ..compat import compat_str from ..utils import ( ExtractorError,", "}, { # DRM protected 'url': 'https://www.9now.com.au/andrew-marrs-history-of-the-world/season-1/episode-1', 'only_matching': True, },", "'9now.com.au' _VALID_URL = r'https?://(?:www\\.)?9now\\.com\\.au/(?:[^/]+/){2}(?P<id>[^/?#]+)' _GEO_COUNTRIES = ['AU'] _TESTS = [{", "in thumbnails_data.items()] return { '_type': 'url_transparent', 'url': smuggle_url( self.BRIGHTCOVE_URL_TEMPLATE %", "{}).get('%sCache' % kind, {}) if not cache: continue common_data =", "= page_data.get(kind, {}).get('%sCache' % kind, {}) if not cache: continue", "webpage, 'page data'), display_id), display_id) for kind in ('episode', 'clip'):", "try_get( common_data, lambda x: x['episode']['video']['brightcoveId'], compat_str) or 'ref:%s' % common_data['episode']['video']['referenceId']", "= self._match_id(url) webpage = self._download_webpage(url, display_id) page_data = self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*({.*?});',", "release_date = unified_strdate(try_get(common_data, lambda x: x['episode']['availability'], compat_str)) thumbnails_data = try_get(common_data,", "'season': (cache.get(current_key) or list(cache.values())[0]).get('season', None) } break 
else: raise ExtractorError('Unable", "snow globe.', 'uploader_id': '4460760524001', 'timestamp': 1619002200, 'upload_date': '20210421', }, 'expected_warnings':", "video data') if not self.get_param('allow_unplayable_formats') and try_get(common_data, lambda x: x['episode']['video']['drm'],", "}, 'skip': 'Only available in Australia', }, { # episode", "from .common import InfoExtractor from ..compat import compat_str from ..utils", "..compat import compat_str from ..utils import ( ExtractorError, int_or_none, float_or_none,", "compat_str)) release_date = unified_strdate(try_get(common_data, lambda x: x['episode']['availability'], compat_str)) thumbnails_data =", "compat_str)) thumbnails_data = try_get(common_data, lambda x: x['episode']['image']['sizes'], dict) or {}", "if not cache: continue common_data = { 'episode': (cache.get(current_key) or", "ExtractorError('Unable to find video data') if not self.get_param('allow_unplayable_formats') and try_get(common_data,", "x['episode']['video']['drm'], bool): self.report_drm(display_id) brightcove_id = try_get( common_data, lambda x: x['episode']['video']['brightcoveId'],", "x['episode']['episodeNumber'], int) timestamp = unified_timestamp(try_get(common_data, lambda x: x['episode']['airDate'], compat_str)) release_date", "\"on the table\"?', 'uploader_id': '4460760524001', 'upload_date': '20160713', 'timestamp': 1468421266, },", "continue cache = page_data.get(kind, {}).get('%sCache' % kind, {}) if not", "True, }, { # episode of series 'url': 'https://www.9now.com.au/lego-masters/season-3/episode-3', 'info_dict':", "of the competition, teams will have 10 hours to build", "globe.', 'uploader_id': '4460760524001', 'timestamp': 1619002200, 'upload_date': '20210421', }, 'expected_warnings': ['Ignoring", "self.BRIGHTCOVE_URL_TEMPLATE % brightcove_id, {'geo_countries': self._GEO_COUNTRIES}), 'id': video_id, 'title': title, 'description':", "'4460760524001', 'upload_date': '20160713', 'timestamp': 1468421266, }, 'skip': 'Only 
available in", "# episode 'url': 'https://www.9now.com.au/afl-footy-show/2016/episode-19', 'only_matching': True, }, { # DRM", "brightcove_id title = try_get(common_data, lambda x: x['episode']['name'], compat_str) season_number =", "'duration': float_or_none(try_get(common_data, lambda x: x['episode']['video']['duration'], float), 1000), 'thumbnails': thumbnails, 'ie_key':", "= '9now.com.au' _VALID_URL = r'https?://(?:www\\.)?9now\\.com\\.au/(?:[^/]+/){2}(?P<id>[^/?#]+)' _GEO_COUNTRIES = ['AU'] _TESTS =", "x: x['season']['seasonNumber'], int) episode_number = try_get(common_data, lambda x: x['episode']['episodeNumber'], int)", "'16801', 'ext': 'mp4', 'title': '<NAME>\\'s Joey Montagna on the potential", "}, { # episode 'url': 'https://www.9now.com.au/afl-footy-show/2016/episode-19', 'only_matching': True, }, {", "= [{ 'id': thumbnail_id, 'url': thumbnail_url, 'width': int_or_none(thumbnail_id[1:]), } for", "default='{}'), display_id, fatal=False) if not page_data: page_data = self._parse_json(self._parse_json(self._search_regex( r'window\\.__data\\s*=\\s*JSON\\.parse\\s*\\(\\s*(\".+?\")\\s*\\)\\s*;'," ]
[ "torch.cuda.HalfTensor): input tensor corresponding to **g** in the paper. ``g``", "with size (slow, 1, 1, ...1), so that if you", "and backward passes. Accepts fp32 or fp16 input; the output", "these gradients will match the precision of ``grad_input``. \"\"\" check_contig_cuda((grad_output),", "``grad_output`` should be contiguous for performance. Returns: Gradient of loss", "size (slow, faster, faster, ...fastest), we want norms with size", "input tensor corresponding to **v** in the paper. ``input`` should", "that these .contiguous() calls will be no-ops. They're present for", "output tensors, # use ctx state to save the norms", "a 1D tensor with values from the tuple: [output_size(0),1,1,...]. \"\"\"", "supported.\") class Fused_Weight_Norm(Function): \"\"\" Custom autograd function that implements weight", "norm_size = (output.size(0),) + (1,)*(output.dim() - 1) elif dim ==", "size {} is not contiguous\" .format(tensor.size())) if not tensor.is_cuda: raise", "= savedg.new(savedg.size()).contiguous() apex_C.weight_norm_bwd(grad_input, grad_g, grad_output_contig, savedInput, savedg, savedNorms, ctx.dim) return", "to output **w**. ``grad_output`` should be contiguous for performance. Returns:", "Fused_Weight_Norm(Function): \"\"\" Custom autograd function that implements weight norm, as", "= torch.cuda.FloatTensor(norm_size).contiguous() the constructor sees a tuple: FloatTensor( (output_size(0),1,1,...) )", "the kernels, all calculations are performed in fp32 for numerical", "def forward(ctx, input, g, dim=0): \"\"\" Args: input(torch.cuda.FloatTensor or torch.cuda.HalfTensor):", "tensor corresponding to **v** in the paper. ``input`` should be", "these .contiguous() calls will be no-ops. They're present for safety.", "+ (1,)*(output.dim() - 1) elif dim == output.dim() - 1:", "to ``input`` and ``g``. 
The precision of these gradients will", "as output, \"t/norms\" will broadcast each element of norms across", "@staticmethod @once_differentiable def backward(ctx, grad_output): \"\"\" Args: grad_output(torch.cuda.FloatTensor or torch.cuda.HalfTensor):", "loss with respect to output **w**. ``grad_output`` should be contiguous", "dim return output @staticmethod @once_differentiable def backward(ctx, grad_output): \"\"\" Args:", "names): for tensor, name in zip(tensors, names): if not tensor.is_contiguous():", "corresponding to **w** in the paper. Output type and precision", "<gh_stars>1-10 import torch from torch.autograd import Variable from torch.autograd.function import", "last dimension of the input tensor is supported. Returns: Output", "tensor, name in zip(tensors, names): if not tensor.is_contiguous(): raise RuntimeError(name+\"", "Gradient of loss with respect to ``input`` and ``g``. The", "# save_for_backward can only save input or output tensors, #", "should be the same type as ``input``. dim(int, optional, default=0):", "raise RuntimeError(\"Currently, Fused_Weight_Norm only supports first or last dimension.\") norms", "only cuda tensors are supported.\") class Fused_Weight_Norm(Function): \"\"\" Custom autograd", "weight norm, as presented in `<https://arxiv.org/abs/1602.07868>`_, along a tensor's slowest", "= {}\" # .format(input.size())) check_contig_cuda((input,g),(\"input\",\"g\")) \"\"\" This is ok, new()", "is supported. Returns: Output tensor corresponding to **w** in the", "Variable from torch.autograd.function import Function, once_differentiable import apex_C def check_contig_cuda(tensors,", "want retrieve norms and apply the same normalizing factors to", "ctx.norms = norms ctx.dim = dim return output @staticmethod @once_differentiable", "we want norms with size (slow, 1, 1, ...1), so", "only save input or output tensors, # use ctx state", "of these gradients will match the precision of ``grad_input``. 
\"\"\"", "that if you want retrieve norms and apply the same", "Within the kernels, all calculations are performed in fp32 for", "a tensor's slowest or fastest dimension using fused kernel launches", "RuntimeError(name+\" with size {} is not contiguous\" .format(tensor.size())) if not", "kernels, all calculations are performed in fp32 for numerical stability,", "first or last dimension of the input tensor is supported.", "1: norm_size = (1,)*(output.dim() - 1) + (output.size(-1),) else: raise", "not tensor.is_cuda: raise RuntimeError(name+\".is_cuda = False.\" \"Currently, only cuda tensors", "RuntimeError(\"Currently, Fused_Weight_Norm only supports first or last dimension.\") norms =", "grad_output_contig.new(grad_output.size()).contiguous() grad_g = savedg.new(savedg.size()).contiguous() apex_C.weight_norm_bwd(grad_input, grad_g, grad_output_contig, savedInput, savedg, savedNorms,", "treats a torch.Size object properly. No need to unpack with", "an asterisk via new(*input.size()). \"\"\" output = input.new(input.size()).contiguous() \"\"\" For", "tensor.is_cuda: raise RuntimeError(name+\".is_cuda = False.\" \"Currently, only cuda tensors are", "input, g, dim) ctx.save_for_backward(input, g) # save_for_backward can only save", "tensor.is_contiguous(): raise RuntimeError(name+\" with size {} is not contiguous\" .format(tensor.size()))", "fp32 for numerical stability, regardless of input/output precision. \"\"\" @staticmethod", "tensor is supported. Returns: Output tensor corresponding to **w** in", "norms and apply the same normalizing factors to another Tensor", "perform weightnorm. Currently, only the first or last dimension of", "if not tensor.is_cuda: raise RuntimeError(name+\".is_cuda = False.\" \"Currently, only cuda", "as ``input``. 
dim(int, optional, default=0): Dimension across which to perform", "torch.cuda.FloatTensor(*norm_size).contiguous() \"\"\" Beware: If you call the following: norms =", "and creates a 1D tensor with values from the tuple:", "of input/output precision. \"\"\" @staticmethod def forward(ctx, input, g, dim=0):", "ok, new() treats a torch.Size object properly. No need to", "# torch.cuda.nvtx.range_push(\"FusedNorm.forward, input.size() = {}\" # .format(input.size())) check_contig_cuda((input,g),(\"input\",\"g\")) \"\"\" This", "torch.Size object properly. No need to unpack with an asterisk", "``g``. The precision of these gradients will match the precision", "1, 1, ...1), so that if you want retrieve norms", "\"\"\" check_contig_cuda((grad_output), (\"grad_output\")) savedInput, savedg = ctx.saved_tensors savedNorms = ctx.norms", "ctx.norms # We expect that these .contiguous() calls will be", "and apply the same normalizing factors to another Tensor \"t\"", "to **g** in the paper. ``g`` should be the same", "We expect that these .contiguous() calls will be no-ops. They're", "``input`` and ``g``. The precision of these gradients will match", "**g** in the paper. ``g`` should be the same type", "return output @staticmethod @once_differentiable def backward(ctx, grad_output): \"\"\" Args: grad_output(torch.cuda.FloatTensor", "the first or last dimension of the input tensor is", "``input``. dim(int, optional, default=0): Dimension across which to perform weightnorm.", "the output type will match the input type. 
Within the", "slowest or fastest dimension using fused kernel launches for the", "or torch.cuda.HalfTensor): input tensor corresponding to **v** in the paper.", "performed in fp32 for numerical stability, regardless of input/output precision.", "raise RuntimeError(name+\".is_cuda = False.\" \"Currently, only cuda tensors are supported.\")", "tensors are supported.\") class Fused_Weight_Norm(Function): \"\"\" Custom autograd function that", "of loss with respect to ``input`` and ``g``. The precision", "norms ctx.dim = dim return output @staticmethod @once_differentiable def backward(ctx,", "all calculations are performed in fp32 for numerical stability, regardless", "torch from torch.autograd import Variable from torch.autograd.function import Function, once_differentiable", "savedNorms = ctx.norms # We expect that these .contiguous() calls", "properly. No need to unpack with an asterisk via new(*input.size()).", "present for safety. grad_output_contig = grad_output.contiguous() grad_input = grad_output_contig.new(grad_output.size()).contiguous() grad_g", "output with size (slow, faster, faster, ...fastest), we want norms", "from torch.autograd.function import Function, once_differentiable import apex_C def check_contig_cuda(tensors, names):", "kernel launches for the forward and backward passes. Accepts fp32", "else: raise RuntimeError(\"Currently, Fused_Weight_Norm only supports first or last dimension.\")", "savedg = ctx.saved_tensors savedNorms = ctx.norms # We expect that", "Output tensor corresponding to **w** in the paper. Output type", "# .format(input.size())) check_contig_cuda((input,g),(\"input\",\"g\")) \"\"\" This is ok, new() treats a", "= torch.cuda.FloatTensor(*norm_size).contiguous() \"\"\" Beware: If you call the following: norms", "backward passes. Accepts fp32 or fp16 input; the output type", "output **w**. ``grad_output`` should be contiguous for performance. Returns: Gradient", "with respect to ``input`` and ``g``. 
The precision of these", "or fastest dimension using fused kernel launches for the forward", "[output_size(0),1,1,...]. \"\"\" apex_C.weight_norm_fwd(output, norms, input, g, dim) ctx.save_for_backward(input, g) #", "...fastest), we want norms with size (slow, 1, 1, ...1),", "save_for_backward can only save input or output tensors, # use", "and precision will match type and precision of ``input``. \"\"\"", "type. Within the kernels, all calculations are performed in fp32", "respect to ``input`` and ``g``. The precision of these gradients", "dimension of the input tensor is supported. Returns: Output tensor", "with values from the tuple: [output_size(0),1,1,...]. \"\"\" apex_C.weight_norm_fwd(output, norms, input,", "Returns: Output tensor corresponding to **w** in the paper. Output", "only supports first or last dimension.\") norms = torch.cuda.FloatTensor(*norm_size).contiguous() \"\"\"", "function that implements weight norm, as presented in `<https://arxiv.org/abs/1602.07868>`_, along", "same size as output, \"t/norms\" will broadcast each element of", "g, dim=0): \"\"\" Args: input(torch.cuda.FloatTensor or torch.cuda.HalfTensor): input tensor corresponding", "state to save the norms and dimension: ctx.norms = norms", "ctx.saved_tensors savedNorms = ctx.norms # We expect that these .contiguous()", "1) elif dim == output.dim() - 1: norm_size = (1,)*(output.dim()", "in the paper. ``g`` should be the same type as", "along a tensor's slowest or fastest dimension using fused kernel", "the input type. Within the kernels, all calculations are performed", "should be contiguous for performance. Returns: Gradient of loss with", "= (output.size(0),) + (1,)*(output.dim() - 1) elif dim == output.dim()", "should be contiguous. g(torch.cuda.FloatTensor or torch.cuda.HalfTensor): input tensor corresponding to", "default=0): Dimension across which to perform weightnorm. 
Currently, only the", "check_contig_cuda((grad_output), (\"grad_output\")) savedInput, savedg = ctx.saved_tensors savedNorms = ctx.norms #", "element of norms across the corresponding slowest dim of t.", "with size {} is not contiguous\" .format(tensor.size())) if not tensor.is_cuda:", "dim == 0: norm_size = (output.size(0),) + (1,)*(output.dim() - 1)", "implements weight norm, as presented in `<https://arxiv.org/abs/1602.07868>`_, along a tensor's", "tuple: [output_size(0),1,1,...]. \"\"\" apex_C.weight_norm_fwd(output, norms, input, g, dim) ctx.save_for_backward(input, g)", "ctx.dim = dim return output @staticmethod @once_differentiable def backward(ctx, grad_output):", "Args: input(torch.cuda.FloatTensor or torch.cuda.HalfTensor): input tensor corresponding to **v** in", "\"\"\" @staticmethod def forward(ctx, input, g, dim=0): \"\"\" Args: input(torch.cuda.FloatTensor", "type and precision will match type and precision of ``input``.", "the same type as ``input``. dim(int, optional, default=0): Dimension across", "via new(*input.size()). \"\"\" output = input.new(input.size()).contiguous() \"\"\" For output with", "(output.size(0),) + (1,)*(output.dim() - 1) elif dim == output.dim() -", "name in zip(tensors, names): if not tensor.is_contiguous(): raise RuntimeError(name+\" with", "corresponding slowest dim of t. \"\"\" if dim == 0:", "will match the precision of ``grad_input``. \"\"\" check_contig_cuda((grad_output), (\"grad_output\")) savedInput,", "will be no-ops. They're present for safety. grad_output_contig = grad_output.contiguous()", "size as output, \"t/norms\" will broadcast each element of norms", "dimension using fused kernel launches for the forward and backward", "forward and backward passes. Accepts fp32 or fp16 input; the", "``input`` should be contiguous. 
g(torch.cuda.FloatTensor or torch.cuda.HalfTensor): input tensor corresponding", "are supported.\") class Fused_Weight_Norm(Function): \"\"\" Custom autograd function that implements", "dim(int, optional, default=0): Dimension across which to perform weightnorm. Currently,", "== output.dim() - 1: norm_size = (1,)*(output.dim() - 1) +", "output @staticmethod @once_differentiable def backward(ctx, grad_output): \"\"\" Args: grad_output(torch.cuda.FloatTensor or", "save input or output tensors, # use ctx state to", "norms, input, g, dim) ctx.save_for_backward(input, g) # save_for_backward can only", "import Variable from torch.autograd.function import Function, once_differentiable import apex_C def", "broadcast each element of norms across the corresponding slowest dim", "output.dim() - 1: norm_size = (1,)*(output.dim() - 1) + (output.size(-1),)", "fused kernel launches for the forward and backward passes. Accepts", "of loss with respect to output **w**. ``grad_output`` should be", "be contiguous. g(torch.cuda.FloatTensor or torch.cuda.HalfTensor): input tensor corresponding to **g**", "faster, ...fastest), we want norms with size (slow, 1, 1,", "= (1,)*(output.dim() - 1) + (output.size(-1),) else: raise RuntimeError(\"Currently, Fused_Weight_Norm", "(slow, 1, 1, ...1), so that if you want retrieve", "with size (slow, faster, faster, ...fastest), we want norms with", "and precision of ``input``. \"\"\" # torch.cuda.nvtx.range_push(\"FusedNorm.forward, input.size() = {}\"", "Currently, only the first or last dimension of the input", "(1,)*(output.dim() - 1) elif dim == output.dim() - 1: norm_size", "dim) ctx.save_for_backward(input, g) # save_for_backward can only save input or", "weightnorm. 
Currently, only the first or last dimension of the", "so that if you want retrieve norms and apply the", "== 0: norm_size = (output.size(0),) + (1,)*(output.dim() - 1) elif", "in zip(tensors, names): if not tensor.is_contiguous(): raise RuntimeError(name+\" with size", "\"\"\" apex_C.weight_norm_fwd(output, norms, input, g, dim) ctx.save_for_backward(input, g) # save_for_backward", "= grad_output_contig.new(grad_output.size()).contiguous() grad_g = savedg.new(savedg.size()).contiguous() apex_C.weight_norm_bwd(grad_input, grad_g, grad_output_contig, savedInput, savedg,", "import apex_C def check_contig_cuda(tensors, names): for tensor, name in zip(tensors,", "values from the tuple: [output_size(0),1,1,...]. \"\"\" apex_C.weight_norm_fwd(output, norms, input, g,", "input or output tensors, # use ctx state to save", "= ctx.norms # We expect that these .contiguous() calls will", "fp16 input; the output type will match the input type.", "you want retrieve norms and apply the same normalizing factors", "norms with size (slow, 1, 1, ...1), so that if", "are performed in fp32 for numerical stability, regardless of input/output", "Dimension across which to perform weightnorm. Currently, only the first", "- 1) elif dim == output.dim() - 1: norm_size =", "elif dim == output.dim() - 1: norm_size = (1,)*(output.dim() -", "1D tensor with values from the tuple: [output_size(0),1,1,...]. \"\"\" apex_C.weight_norm_fwd(output,", "forward(ctx, input, g, dim=0): \"\"\" Args: input(torch.cuda.FloatTensor or torch.cuda.HalfTensor): input", "FloatTensor( (output_size(0),1,1,...) ) and creates a 1D tensor with values", "(output_size(0),1,1,...) 
) and creates a 1D tensor with values from", "(slow, faster, faster, ...fastest), we want norms with size (slow,", "apex_C.weight_norm_fwd(output, norms, input, g, dim) ctx.save_for_backward(input, g) # save_for_backward can", "grad_g = savedg.new(savedg.size()).contiguous() apex_C.weight_norm_bwd(grad_input, grad_g, grad_output_contig, savedInput, savedg, savedNorms, ctx.dim)", "launches for the forward and backward passes. Accepts fp32 or", "Beware: If you call the following: norms = torch.cuda.FloatTensor(norm_size).contiguous() the", "contiguous for performance. Returns: Gradient of loss with respect to", "apex_C def check_contig_cuda(tensors, names): for tensor, name in zip(tensors, names):", "tensor's slowest or fastest dimension using fused kernel launches for", "zip(tensors, names): if not tensor.is_contiguous(): raise RuntimeError(name+\" with size {}", "Custom autograd function that implements weight norm, as presented in", "Gradient of loss with respect to output **w**. ``grad_output`` should", "expect that these .contiguous() calls will be no-ops. They're present", "g(torch.cuda.FloatTensor or torch.cuda.HalfTensor): input tensor corresponding to **g** in the", "output, \"t/norms\" will broadcast each element of norms across the", "is not contiguous\" .format(tensor.size())) if not tensor.is_cuda: raise RuntimeError(name+\".is_cuda =", "will match type and precision of ``input``. \"\"\" # torch.cuda.nvtx.range_push(\"FusedNorm.forward,", "optional, default=0): Dimension across which to perform weightnorm. Currently, only", "- 1) + (output.size(-1),) else: raise RuntimeError(\"Currently, Fused_Weight_Norm only supports", "the constructor sees a tuple: FloatTensor( (output_size(0),1,1,...) ) and creates", "import Function, once_differentiable import apex_C def check_contig_cuda(tensors, names): for tensor,", "the forward and backward passes. 
Accepts fp32 or fp16 input;", "output = input.new(input.size()).contiguous() \"\"\" For output with size (slow, faster,", "of norms across the corresponding slowest dim of t. \"\"\"", "raise RuntimeError(name+\" with size {} is not contiguous\" .format(tensor.size())) if", "in the paper. Output type and precision will match type", "following: norms = torch.cuda.FloatTensor(norm_size).contiguous() the constructor sees a tuple: FloatTensor(", "@staticmethod def forward(ctx, input, g, dim=0): \"\"\" Args: input(torch.cuda.FloatTensor or", "class Fused_Weight_Norm(Function): \"\"\" Custom autograd function that implements weight norm,", "for tensor, name in zip(tensors, names): if not tensor.is_contiguous(): raise", "torch.cuda.nvtx.range_push(\"FusedNorm.forward, input.size() = {}\" # .format(input.size())) check_contig_cuda((input,g),(\"input\",\"g\")) \"\"\" This is", "paper. ``input`` should be contiguous. g(torch.cuda.FloatTensor or torch.cuda.HalfTensor): input tensor", "factors to another Tensor \"t\" with the same size as", "the norms and dimension: ctx.norms = norms ctx.dim = dim", "norm, as presented in `<https://arxiv.org/abs/1602.07868>`_, along a tensor's slowest or", "They're present for safety. grad_output_contig = grad_output.contiguous() grad_input = grad_output_contig.new(grad_output.size()).contiguous()", "each element of norms across the corresponding slowest dim of", "1, ...1), so that if you want retrieve norms and", "of t. \"\"\" if dim == 0: norm_size = (output.size(0),)", "new() treats a torch.Size object properly. No need to unpack", "match the input type. Within the kernels, all calculations are", "a tuple: FloatTensor( (output_size(0),1,1,...) ) and creates a 1D tensor", "# We expect that these .contiguous() calls will be no-ops.", "from torch.autograd import Variable from torch.autograd.function import Function, once_differentiable import", "input/output precision. 
\"\"\" @staticmethod def forward(ctx, input, g, dim=0): \"\"\"", "input tensor corresponding to **g** in the paper. ``g`` should", "sees a tuple: FloatTensor( (output_size(0),1,1,...) ) and creates a 1D", "= ctx.saved_tensors savedNorms = ctx.norms # We expect that these", "if dim == 0: norm_size = (output.size(0),) + (1,)*(output.dim() -", "torch.cuda.FloatTensor(norm_size).contiguous() the constructor sees a tuple: FloatTensor( (output_size(0),1,1,...) ) and", "tensor corresponding to **w** in the paper. Output type and", "the paper. ``g`` should be the same type as ``input``.", "or torch.cuda.HalfTensor): input tensor corresponding to **g** in the paper.", "as presented in `<https://arxiv.org/abs/1602.07868>`_, along a tensor's slowest or fastest", "to save the norms and dimension: ctx.norms = norms ctx.dim", "presented in `<https://arxiv.org/abs/1602.07868>`_, along a tensor's slowest or fastest dimension", "in `<https://arxiv.org/abs/1602.07868>`_, along a tensor's slowest or fastest dimension using", "save the norms and dimension: ctx.norms = norms ctx.dim =", "tensors, # use ctx state to save the norms and", "only the first or last dimension of the input tensor", "apply the same normalizing factors to another Tensor \"t\" with", "is ok, new() treats a torch.Size object properly. No need", "norms = torch.cuda.FloatTensor(*norm_size).contiguous() \"\"\" Beware: If you call the following:", "to another Tensor \"t\" with the same size as output,", "\"\"\" Custom autograd function that implements weight norm, as presented", "paper. Output type and precision will match type and precision", "norm_size = (1,)*(output.dim() - 1) + (output.size(-1),) else: raise RuntimeError(\"Currently,", "or torch.cuda.HalfTensor): Gradient of loss with respect to output **w**.", "calls will be no-ops. They're present for safety. grad_output_contig =", "be contiguous for performance. 
Returns: Gradient of loss with respect", "you call the following: norms = torch.cuda.FloatTensor(norm_size).contiguous() the constructor sees", "Fused_Weight_Norm only supports first or last dimension.\") norms = torch.cuda.FloatTensor(*norm_size).contiguous()", "which to perform weightnorm. Currently, only the first or last", "(1,)*(output.dim() - 1) + (output.size(-1),) else: raise RuntimeError(\"Currently, Fused_Weight_Norm only", "be the same type as ``input``. dim(int, optional, default=0): Dimension", "supports first or last dimension.\") norms = torch.cuda.FloatTensor(*norm_size).contiguous() \"\"\" Beware:", "input(torch.cuda.FloatTensor or torch.cuda.HalfTensor): input tensor corresponding to **v** in the", "`<https://arxiv.org/abs/1602.07868>`_, along a tensor's slowest or fastest dimension using fused", "will broadcast each element of norms across the corresponding slowest", "@once_differentiable def backward(ctx, grad_output): \"\"\" Args: grad_output(torch.cuda.FloatTensor or torch.cuda.HalfTensor): Gradient", "constructor sees a tuple: FloatTensor( (output_size(0),1,1,...) ) and creates a", "torch.autograd import Variable from torch.autograd.function import Function, once_differentiable import apex_C", "+ (output.size(-1),) else: raise RuntimeError(\"Currently, Fused_Weight_Norm only supports first or", "= dim return output @staticmethod @once_differentiable def backward(ctx, grad_output): \"\"\"", "No need to unpack with an asterisk via new(*input.size()). \"\"\"", "apex_C.weight_norm_bwd(grad_input, grad_g, grad_output_contig, savedInput, savedg, savedNorms, ctx.dim) return grad_input, grad_g,", "type will match the input type. Within the kernels, all", "from the tuple: [output_size(0),1,1,...]. \"\"\" apex_C.weight_norm_fwd(output, norms, input, g, dim)", ") and creates a 1D tensor with values from the", "``grad_input``. 
\"\"\" check_contig_cuda((grad_output), (\"grad_output\")) savedInput, savedg = ctx.saved_tensors savedNorms =", "and dimension: ctx.norms = norms ctx.dim = dim return output", "use ctx state to save the norms and dimension: ctx.norms", "torch.cuda.HalfTensor): Gradient of loss with respect to output **w**. ``grad_output``", "match type and precision of ``input``. \"\"\" # torch.cuda.nvtx.range_push(\"FusedNorm.forward, input.size()", "faster, faster, ...fastest), we want norms with size (slow, 1,", "fp32 or fp16 input; the output type will match the", "for the forward and backward passes. Accepts fp32 or fp16", "asterisk via new(*input.size()). \"\"\" output = input.new(input.size()).contiguous() \"\"\" For output", "grad_input = grad_output_contig.new(grad_output.size()).contiguous() grad_g = savedg.new(savedg.size()).contiguous() apex_C.weight_norm_bwd(grad_input, grad_g, grad_output_contig, savedInput,", "(\"grad_output\")) savedInput, savedg = ctx.saved_tensors savedNorms = ctx.norms # We", "# use ctx state to save the norms and dimension:", "not contiguous\" .format(tensor.size())) if not tensor.is_cuda: raise RuntimeError(name+\".is_cuda = False.\"", "same normalizing factors to another Tensor \"t\" with the same", "check_contig_cuda(tensors, names): for tensor, name in zip(tensors, names): if not", "across which to perform weightnorm. Currently, only the first or", "g, dim) ctx.save_for_backward(input, g) # save_for_backward can only save input", "of ``grad_input``. \"\"\" check_contig_cuda((grad_output), (\"grad_output\")) savedInput, savedg = ctx.saved_tensors savedNorms", "the precision of ``grad_input``. \"\"\" check_contig_cuda((grad_output), (\"grad_output\")) savedInput, savedg =", ".contiguous() calls will be no-ops. They're present for safety. grad_output_contig", "input tensor is supported. Returns: Output tensor corresponding to **w**", "regardless of input/output precision. 
\"\"\" @staticmethod def forward(ctx, input, g,", ".format(tensor.size())) if not tensor.is_cuda: raise RuntimeError(name+\".is_cuda = False.\" \"Currently, only", "input type. Within the kernels, all calculations are performed in", "ctx state to save the norms and dimension: ctx.norms =", "torch.autograd.function import Function, once_differentiable import apex_C def check_contig_cuda(tensors, names): for", "False.\" \"Currently, only cuda tensors are supported.\") class Fused_Weight_Norm(Function): \"\"\"", "and ``g``. The precision of these gradients will match the", "autograd function that implements weight norm, as presented in `<https://arxiv.org/abs/1602.07868>`_,", "1) + (output.size(-1),) else: raise RuntimeError(\"Currently, Fused_Weight_Norm only supports first", "{}\" # .format(input.size())) check_contig_cuda((input,g),(\"input\",\"g\")) \"\"\" This is ok, new() treats", "numerical stability, regardless of input/output precision. \"\"\" @staticmethod def forward(ctx,", "for performance. Returns: Gradient of loss with respect to ``input``", "the same size as output, \"t/norms\" will broadcast each element", "grad_output_contig = grad_output.contiguous() grad_input = grad_output_contig.new(grad_output.size()).contiguous() grad_g = savedg.new(savedg.size()).contiguous() apex_C.weight_norm_bwd(grad_input,", "input; the output type will match the input type. Within", "the corresponding slowest dim of t. \"\"\" if dim ==", "size (slow, 1, 1, ...1), so that if you want", "or last dimension.\") norms = torch.cuda.FloatTensor(*norm_size).contiguous() \"\"\" Beware: If you", "RuntimeError(name+\".is_cuda = False.\" \"Currently, only cuda tensors are supported.\") class", "that implements weight norm, as presented in `<https://arxiv.org/abs/1602.07868>`_, along a", "dimension: ctx.norms = norms ctx.dim = dim return output @staticmethod", "= norms ctx.dim = dim return output @staticmethod @once_differentiable def", "to **v** in the paper. 
``input`` should be contiguous. g(torch.cuda.FloatTensor", "\"\"\" For output with size (slow, faster, faster, ...fastest), we", "dimension.\") norms = torch.cuda.FloatTensor(*norm_size).contiguous() \"\"\" Beware: If you call the", "with respect to output **w**. ``grad_output`` should be contiguous for", "grad_g, grad_output_contig, savedInput, savedg, savedNorms, ctx.dim) return grad_input, grad_g, None", "tensor with values from the tuple: [output_size(0),1,1,...]. \"\"\" apex_C.weight_norm_fwd(output, norms,", "precision of ``input``. \"\"\" # torch.cuda.nvtx.range_push(\"FusedNorm.forward, input.size() = {}\" #", "respect to output **w**. ``grad_output`` should be contiguous for performance.", "call the following: norms = torch.cuda.FloatTensor(norm_size).contiguous() the constructor sees a", "new(*input.size()). \"\"\" output = input.new(input.size()).contiguous() \"\"\" For output with size", "savedg.new(savedg.size()).contiguous() apex_C.weight_norm_bwd(grad_input, grad_g, grad_output_contig, savedInput, savedg, savedNorms, ctx.dim) return grad_input,", "grad_output(torch.cuda.FloatTensor or torch.cuda.HalfTensor): Gradient of loss with respect to output", "\"\"\" Args: input(torch.cuda.FloatTensor or torch.cuda.HalfTensor): input tensor corresponding to **v**", "``input``. \"\"\" # torch.cuda.nvtx.range_push(\"FusedNorm.forward, input.size() = {}\" # .format(input.size())) check_contig_cuda((input,g),(\"input\",\"g\"))", "unpack with an asterisk via new(*input.size()). \"\"\" output = input.new(input.size()).contiguous()", "gradients will match the precision of ``grad_input``. \"\"\" check_contig_cuda((grad_output), (\"grad_output\"))", "safety. grad_output_contig = grad_output.contiguous() grad_input = grad_output_contig.new(grad_output.size()).contiguous() grad_g = savedg.new(savedg.size()).contiguous()", "to **w** in the paper. 
Output type and precision will", "or output tensors, # use ctx state to save the", "import torch from torch.autograd import Variable from torch.autograd.function import Function,", "\"\"\" Beware: If you call the following: norms = torch.cuda.FloatTensor(norm_size).contiguous()", "...1), so that if you want retrieve norms and apply", "in fp32 for numerical stability, regardless of input/output precision. \"\"\"", "If you call the following: norms = torch.cuda.FloatTensor(norm_size).contiguous() the constructor", "normalizing factors to another Tensor \"t\" with the same size", "or last dimension of the input tensor is supported. Returns:", "\"t/norms\" will broadcast each element of norms across the corresponding", "**w** in the paper. Output type and precision will match", "passes. Accepts fp32 or fp16 input; the output type will", "the input tensor is supported. Returns: Output tensor corresponding to", "0: norm_size = (output.size(0),) + (1,)*(output.dim() - 1) elif dim", ".format(input.size())) check_contig_cuda((input,g),(\"input\",\"g\")) \"\"\" This is ok, new() treats a torch.Size", "last dimension.\") norms = torch.cuda.FloatTensor(*norm_size).contiguous() \"\"\" Beware: If you call", "ctx.save_for_backward(input, g) # save_for_backward can only save input or output", "want norms with size (slow, 1, 1, ...1), so that", "= False.\" \"Currently, only cuda tensors are supported.\") class Fused_Weight_Norm(Function):", "precision will match type and precision of ``input``. \"\"\" #", "torch.cuda.HalfTensor): input tensor corresponding to **v** in the paper. ``input``", "corresponding to **g** in the paper. ``g`` should be the", "Function, once_differentiable import apex_C def check_contig_cuda(tensors, names): for tensor, name", "**v** in the paper. ``input`` should be contiguous. g(torch.cuda.FloatTensor or", "Tensor \"t\" with the same size as output, \"t/norms\" will", "will match the input type. 
Within the kernels, all calculations", "\"\"\" This is ok, new() treats a torch.Size object properly.", "= grad_output.contiguous() grad_input = grad_output_contig.new(grad_output.size()).contiguous() grad_g = savedg.new(savedg.size()).contiguous() apex_C.weight_norm_bwd(grad_input, grad_g,", "names): if not tensor.is_contiguous(): raise RuntimeError(name+\" with size {} is", "output type will match the input type. Within the kernels,", "another Tensor \"t\" with the same size as output, \"t/norms\"", "to unpack with an asterisk via new(*input.size()). \"\"\" output =", "object properly. No need to unpack with an asterisk via", "input.size() = {}\" # .format(input.size())) check_contig_cuda((input,g),(\"input\",\"g\")) \"\"\" This is ok,", "contiguous. g(torch.cuda.FloatTensor or torch.cuda.HalfTensor): input tensor corresponding to **g** in", "Output type and precision will match type and precision of", "norms = torch.cuda.FloatTensor(norm_size).contiguous() the constructor sees a tuple: FloatTensor( (output_size(0),1,1,...)", "dim of t. \"\"\" if dim == 0: norm_size =", "not tensor.is_contiguous(): raise RuntimeError(name+\" with size {} is not contiguous\"", "paper. ``g`` should be the same type as ``input``. dim(int,", "stability, regardless of input/output precision. \"\"\" @staticmethod def forward(ctx, input,", "grad_output): \"\"\" Args: grad_output(torch.cuda.FloatTensor or torch.cuda.HalfTensor): Gradient of loss with", "backward(ctx, grad_output): \"\"\" Args: grad_output(torch.cuda.FloatTensor or torch.cuda.HalfTensor): Gradient of loss", "**w**. ``grad_output`` should be contiguous for performance. Returns: Gradient of", "need to unpack with an asterisk via new(*input.size()). \"\"\" output", "g) # save_for_backward can only save input or output tensors,", "Args: grad_output(torch.cuda.FloatTensor or torch.cuda.HalfTensor): Gradient of loss with respect to", "the paper. ``input`` should be contiguous. 
g(torch.cuda.FloatTensor or torch.cuda.HalfTensor): input", "dim == output.dim() - 1: norm_size = (1,)*(output.dim() - 1)", "slowest dim of t. \"\"\" if dim == 0: norm_size", "same type as ``input``. dim(int, optional, default=0): Dimension across which", "using fused kernel launches for the forward and backward passes.", "no-ops. They're present for safety. grad_output_contig = grad_output.contiguous() grad_input =", "\"\"\" if dim == 0: norm_size = (output.size(0),) + (1,)*(output.dim()", "contiguous\" .format(tensor.size())) if not tensor.is_cuda: raise RuntimeError(name+\".is_cuda = False.\" \"Currently,", "a torch.Size object properly. No need to unpack with an", "the tuple: [output_size(0),1,1,...]. \"\"\" apex_C.weight_norm_fwd(output, norms, input, g, dim) ctx.save_for_backward(input,", "can only save input or output tensors, # use ctx", "of the input tensor is supported. Returns: Output tensor corresponding", "with the same size as output, \"t/norms\" will broadcast each", "savedInput, savedg = ctx.saved_tensors savedNorms = ctx.norms # We expect", "performance. Returns: Gradient of loss with respect to ``input`` and", "t. \"\"\" if dim == 0: norm_size = (output.size(0),) +", "{} is not contiguous\" .format(tensor.size())) if not tensor.is_cuda: raise RuntimeError(name+\".is_cuda", "be no-ops. They're present for safety. grad_output_contig = grad_output.contiguous() grad_input", "norms across the corresponding slowest dim of t. \"\"\" if", "def check_contig_cuda(tensors, names): for tensor, name in zip(tensors, names): if", "tensor corresponding to **g** in the paper. ``g`` should be", "\"t\" with the same size as output, \"t/norms\" will broadcast", "check_contig_cuda((input,g),(\"input\",\"g\")) \"\"\" This is ok, new() treats a torch.Size object", "the paper. Output type and precision will match type and", "supported. 
Returns: Output tensor corresponding to **w** in the paper.", "The precision of these gradients will match the precision of", "calculations are performed in fp32 for numerical stability, regardless of", "\"\"\" # torch.cuda.nvtx.range_push(\"FusedNorm.forward, input.size() = {}\" # .format(input.size())) check_contig_cuda((input,g),(\"input\",\"g\")) \"\"\"", "corresponding to **v** in the paper. ``input`` should be contiguous.", "``g`` should be the same type as ``input``. dim(int, optional,", "if not tensor.is_contiguous(): raise RuntimeError(name+\" with size {} is not", "dim=0): \"\"\" Args: input(torch.cuda.FloatTensor or torch.cuda.HalfTensor): input tensor corresponding to", "\"Currently, only cuda tensors are supported.\") class Fused_Weight_Norm(Function): \"\"\" Custom", "first or last dimension.\") norms = torch.cuda.FloatTensor(*norm_size).contiguous() \"\"\" Beware: If", "grad_output.contiguous() grad_input = grad_output_contig.new(grad_output.size()).contiguous() grad_g = savedg.new(savedg.size()).contiguous() apex_C.weight_norm_bwd(grad_input, grad_g, grad_output_contig,", "if you want retrieve norms and apply the same normalizing", "def backward(ctx, grad_output): \"\"\" Args: grad_output(torch.cuda.FloatTensor or torch.cuda.HalfTensor): Gradient of", "for numerical stability, regardless of input/output precision. \"\"\" @staticmethod def", "input, g, dim=0): \"\"\" Args: input(torch.cuda.FloatTensor or torch.cuda.HalfTensor): input tensor", "in the paper. ``input`` should be contiguous. g(torch.cuda.FloatTensor or torch.cuda.HalfTensor):", "precision. \"\"\" @staticmethod def forward(ctx, input, g, dim=0): \"\"\" Args:", "cuda tensors are supported.\") class Fused_Weight_Norm(Function): \"\"\" Custom autograd function", "type and precision of ``input``. \"\"\" # torch.cuda.nvtx.range_push(\"FusedNorm.forward, input.size() =", "across the corresponding slowest dim of t. 
\"\"\" if dim", "precision of these gradients will match the precision of ``grad_input``.", "of ``input``. \"\"\" # torch.cuda.nvtx.range_push(\"FusedNorm.forward, input.size() = {}\" # .format(input.size()))", "Returns: Gradient of loss with respect to ``input`` and ``g``.", "precision of ``grad_input``. \"\"\" check_contig_cuda((grad_output), (\"grad_output\")) savedInput, savedg = ctx.saved_tensors", "for safety. grad_output_contig = grad_output.contiguous() grad_input = grad_output_contig.new(grad_output.size()).contiguous() grad_g =", "This is ok, new() treats a torch.Size object properly. No", "the following: norms = torch.cuda.FloatTensor(norm_size).contiguous() the constructor sees a tuple:", "once_differentiable import apex_C def check_contig_cuda(tensors, names): for tensor, name in", "Accepts fp32 or fp16 input; the output type will match", "or fp16 input; the output type will match the input", "type as ``input``. dim(int, optional, default=0): Dimension across which to", "retrieve norms and apply the same normalizing factors to another", "input.new(input.size()).contiguous() \"\"\" For output with size (slow, faster, faster, ...fastest),", "= input.new(input.size()).contiguous() \"\"\" For output with size (slow, faster, faster,", "creates a 1D tensor with values from the tuple: [output_size(0),1,1,...].", "(output.size(-1),) else: raise RuntimeError(\"Currently, Fused_Weight_Norm only supports first or last", "tuple: FloatTensor( (output_size(0),1,1,...) ) and creates a 1D tensor with", "the same normalizing factors to another Tensor \"t\" with the", "For output with size (slow, faster, faster, ...fastest), we want", "with an asterisk via new(*input.size()). \"\"\" output = input.new(input.size()).contiguous() \"\"\"", "fastest dimension using fused kernel launches for the forward and", "loss with respect to ``input`` and ``g``. 
The precision of", "\"\"\" output = input.new(input.size()).contiguous() \"\"\" For output with size (slow,", "- 1: norm_size = (1,)*(output.dim() - 1) + (output.size(-1),) else:", "match the precision of ``grad_input``. \"\"\" check_contig_cuda((grad_output), (\"grad_output\")) savedInput, savedg", "norms and dimension: ctx.norms = norms ctx.dim = dim return", "to perform weightnorm. Currently, only the first or last dimension", "\"\"\" Args: grad_output(torch.cuda.FloatTensor or torch.cuda.HalfTensor): Gradient of loss with respect" ]
[ "\"w\") self.install_required_tools() scenario = self.get_scenario() self.exec_id = self.label self.script =", "from bzt.utils import unzip, RequiredTool, JavaVM, shutdown_process, TclLibrary, FileReader, RESOURCES_DIR", "None start = time.time() for lnum, line in enumerate(self.lines): if", "#%s\" % test_id source_id = '' # maybe use worker_id", "default_address else \"\" self.root.append(self.gen_statement('request = HTTPRequest(%s)' % url_arg, indent=0)) self.root.append(self.gen_statement('test", "is not None: with open(self.stderr.name) as fds: contents = fds.read().strip()", "0) if load.concurrency: fds.write(\"grinder.threads=%s\\n\" % load.concurrency) if load.duration: fds.write(\"grinder.duration=%s\\n\" %", "if prop_file: resource_files.append(prop_file) return resource_files def get_error_diagnostics(self): diagnostics = []", "self.DETAILS_REGEX.match(line) if not matched: continue if worker_id == matched.group(1) and", "self.root.append(self.gen_new_line()) default_address = self.scenario.get(\"default-address\") url_arg = \"url=%r\" % default_address if", "grinder_path = settings.get(\"path\", self.LOCAL_PATH) grinder_path = get_full_path(grinder_path) download_link = settings.get(\"download-link\",", "= req.method.upper() url = req.url local_headers = req.headers params =", "* 1000)) fds.write(\"# taurus load values in case you need", "= [] if self.page_source is not None: self.log.debug('Parsing mirrors...') base_link", "if not line.endswith(\"\\n\"): self.partial_buffer += line return None, None line", "that returns next portion of data :param last_pass: \"\"\" self.log.debug(\"Reading", "not in self.known_threads: self.known_threads.add(thread_id) self.concurrency += 1 url, error_msg =", "as fast as possible. 
\"\"\" self.env.set({\"T_GRINDER_PREFIX\": self.exec_id}) self.process = self.execute(self.cmd_line)", "line[:line.find(' ')] line = line[line.find(' '):] data_fields = line.split(self.DELIMITER) if", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "them\\n\") fds.write(\"taurus.concurrency=%s\\n\" % load.concurrency) fds.write(\"taurus.throughput=%s\\n\" % load.throughput) fds.write(\"taurus.ramp_up=%s\\n\" % load.ramp_up)", "# self.concurrency += 1 pass elif line_parts[1] == 'finished': if", ":return: \"\"\" base_props_file = self.settings.get(\"properties-file\") if base_props_file: fds.write(\"# Base Properies", "still running. Also checks if resulting logs contains any data", "super(GrinderExecutor, self).post_process() def __scenario_from_requests(self): \"\"\" Generate grinder scenario from requests", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "/ 1000.0) r_time = int(data_fields[self.idx[\"Test time\"]]) / 1000.0 latency =", "indent=0)) global_timeout = dehumanize_time(self.scenario.get(\"timeout\", None)) if global_timeout: self.root.append(self.gen_statement(\"defaults.setTimeout(%s)\" % int(global_timeout", "# maybe use worker_id somehow? return t_stamp, label, self.concurrency, r_time,", "import ScenarioExecutor, FileLister, HavingInstallableTools, SelfDiagnosable from bzt.modules.aggregator import ConsolidatingAggregator, ResultsReader", "self.get_error_diagnostics()) return True return False def shutdown(self): \"\"\" If tool", "url, params, headers))) think_time = dehumanize_time(req.priority_option('think-time')) if think_time: main_method.append(self.gen_statement(\"grinder.sleep(%s)\" %", "header, value in iteritems(headers): self.root.append(self.gen_statement(\"NVPair(%r, %r),\" % (header, value), indent=4))", "ANY KIND, either express or implied. 
See the License for", "__init__(self, scenario, parent_logger): super(GrinderScriptBuilder, self).__init__(scenario, parent_logger) self.label = \"BZT Requests\"", "self.install_required_tools() scenario = self.get_scenario() self.exec_id = self.label self.script = self.get_script_path()", "= None self.process = None self.end_time = None self.retcode =", "+ \"]\" def gen_runner_class(self): runner_classdef = self.gen_class_definition(\"TestRunner\", [\"object\"]) sleep_method =", "time.time() - start if duration < 0.001: duration = 0.001", "None return data_fields, worker_id def __parse_prev_lines(self, worker_id, lnum, r_code, bytes_count):", "LOCAL_PATH = \"~/.bzt/grinder-taurus/lib/grinder.jar\" def __init__(self, config=None, **kwargs): settings = config", "worker_id = line[:line.find(' ')] line = line[line.find(' '):] data_fields =", "builder = GrinderScriptBuilder(self.get_scenario(), self.log) builder.label = self.label builder.build_source_code() builder.save(script) return", "= time.time() self.log.debug(\"Grinder worked for %s seconds\", self.end_time - self.start_time)", "indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_statement(\"defaults = HTTPPluginControl.getConnectionDefaults()\", indent=0)) self.root.append(self.gen_statement(\"utilities = HTTPPluginControl.getHTTPUtilities()\", indent=0))", "worker_id, lnum) if lnum is not None: duration = time.time()", "**kwargs): settings = config or {} grinder_path = settings.get(\"path\", self.LOCAL_PATH)", "self.start_time: self.end_time = time.time() self.log.debug(\"Grinder worked for %s seconds\", self.end_time", "line_parts[1] == 'finished': if self.concurrency > 0: self.concurrency -= 1", "TestRunner\" else: error_msg = None # suppress errors if self.report_by_url:", "because we have ramp-down equal to rampup return self.widget def", "value), indent=4)) self.root.append(self.gen_statement(\"])\", indent=0)) global_timeout = dehumanize_time(self.scenario.get(\"timeout\", None)) 
if global_timeout:", "base_props_file) with open(base_props_file) as bpf: fds.write(bpf.read()) fds.write(\"# Base Properies File", "= self.gen_method_definition(\"__call__\", [\"self\"]) main_method.append(self.gen_statement(\"self.rampUpSleeper()\")) for req in self.scenario.get_requests(): if not", "params, headers))) think_time = dehumanize_time(req.priority_option('think-time')) if think_time: main_method.append(self.gen_statement(\"grinder.sleep(%s)\" % int(think_time", "% (key, val)) fds.write(\"# Scenario Properies End\\n\\n\") def __write_bzt_props(self, fds):", "\".py\") builder = GrinderScriptBuilder(self.get_scenario(), self.log) builder.label = self.label builder.build_source_code() builder.save(script)", "\"~/.bzt/grinder-taurus/lib/grinder.jar\" def __init__(self, config=None, **kwargs): settings = config or {}", "matched.group(5): return matched.group(2), matched.group(4) return url, error_msg class Grinder(RequiredTool): #", "file artifact \"\"\" if self.kpi_file: self.engine.existing_artifact(self.kpi_file) super(GrinderExecutor, self).post_process() def __scenario_from_requests(self):", "self.log.debug('Parsing mirrors...') base_link = \"http://sourceforge.net/projects/grinder/files/The%20Grinder%203/{version}/grinder-{version}\" \\ \"-binary.zip/download?use_mirror={mirror}\" li_search_pattern = re.compile(r'<li", "contents) if self.stderr is not None: with open(self.stderr.name) as fds:", "int(global_timeout * 1000), indent=0)) cookie_flag = int(self.scenario.get(\"store-cookie\", True)) self.root.append(self.gen_statement(\"defaults.setUseCookies(%s)\" %", "2015 BlazeMeter Inc. 
Licensed under the Apache License, Version 2.0", "Script Properies File End: %s\\n\\n\" % script_props_file) # scenario props", "%s\\n\" % script_props_file) with open(script_props_file) as spf: fds.write(spf.read()) fds.write(\"# Script", "error_msg = \"HTTP %s\" % r_code else: error_msg = \"Java", "fds.read().strip() if contents.strip(): diagnostics.append(\"Grinder STDOUT:\\n\" + contents) if self.stderr is", "open(self.stderr.name) as fds: contents = fds.read().strip() if contents.strip(): diagnostics.append(\"Grinder STDOUT:\\n\"", ":return: \"\"\" script_props_file = scenario.get(\"properties-file\") if script_props_file: fds.write(\"# Script Properies", "method that returns next portion of data :param last_pass: \"\"\"", "from net.grinder.script import Test from net.grinder.script.Grinder import grinder from net.grinder.plugin.http", "executor module \"\"\" def __init__(self): super(GrinderExecutor, self).__init__() self.script = None", "items) + \"]\" def gen_runner_class(self): runner_classdef = self.gen_class_definition(\"TestRunner\", [\"object\"]) sleep_method", "dest = get_full_path(self.tool_path, step_up=2) self.log.info(\"Will install %s into %s\", self.tool_name,", "[\"self\"]) main_method.append(self.gen_statement(\"self.rampUpSleeper()\")) for req in self.scenario.get_requests(): if not isinstance(req, HTTPRequest):", "req.headers params = \"[]\" headers = self.__list_to_nvpair_list(iteritems(local_headers)) main_method.append(self.gen_statement(\"request.%s(%r, %s, %s)\"", "JarTool(?) VERSION = \"3.11\" LOCAL_PATH = \"~/.bzt/grinder-taurus/lib/grinder.jar\" def __init__(self, config=None,", "response errors']]): if not error_msg: if r_code != '0': error_msg", "worker_id somehow? 
return t_stamp, label, self.concurrency, r_time, con_time, latency, r_code,", "self.exec_id) fds.write(\"grinder.script=%s\\n\" % self.script.replace(os.path.sep, \"/\")) fds.write(\"grinder.logDirectory=%s\\n\" % self.engine.artifacts_dir.replace(os.path.sep, \"/\")) load", "* inc)\")) sleep_method.append(self.gen_statement(\"grinder.sleep(sleep_time, 0)\")) sleep_method.append(self.gen_statement(\"if sleep_time: grinder.logger.info('slept for %sms' %", "bzt.modules.java import TaurusJavaHelper from bzt.requests_model import HTTPRequest from bzt.six import", "base properties and base properties file contents to fds :param", "base_props: fds.write(\"# Base Properies Start\\n\") for key, val in iteritems(base_props):", "See the License for the specific language governing permissions and", "label = url elif test_id in self.test_names: label = self.test_names[test_id]", "self.idx = {} self.partial_buffer = \"\" self.start_time = 0 self.end_time", "still running - let's stop it. \"\"\" shutdown_process(self.process, self.log) if", "self._get_tool(TaurusJavaHelper) required_tools = [self._get_tool(TclLibrary), self._get_tool(JavaVM), self.java_helper, grinder] for tool in", "test_name = ' '.join(line_parts[6:]) self.test_names[test_id] = test_name self.log.debug(\"Recognized test id", "if not tool.check_if_installed(): tool.install() def get_widget(self): if not self.widget: if", "/ 1000.0 r_code = data_fields[self.idx[\"HTTP response code\"]].strip() con_time = int(data_fields[self.idx[\"Time", "(self.partial_buffer, line) self.partial_buffer = \"\" line = line.strip() if not", "0: self.concurrency -= 1 elif set(line_parts[1:5]) == {'Test', 'name', 'for',", "HavingInstallableTools, SelfDiagnosable): \"\"\" Grinder executor module \"\"\" def __init__(self): super(GrinderExecutor,", "if not self.scenario.get(\"keepalive\", True): headers['Connection'] = 'close' if headers: self.root.append(self.gen_statement(\"defaults.setDefaultHeaders([\",", 
"fds.write(\"grinder.logDirectory=%s\\n\" % self.engine.artifacts_dir.replace(os.path.sep, \"/\")) load = self.get_load() if load.iterations or", "not self.script: if \"requests\" in scenario: self.script = self.__scenario_from_requests() else:", "load.concurrency) if load.duration: fds.write(\"grinder.duration=%s\\n\" % int(load.duration * 1000)) fds.write(\"# taurus", "in required_tools: if not tool.check_if_installed(): tool.install() def get_widget(self): if not", "header, value in items) + \"]\" def gen_runner_class(self): runner_classdef =", "= self.get_script_path() if script_file_path: resource_files.append(script_file_path) prop_file = self.get_scenario().get(\"properties-file\") if prop_file:", "self.tool_path) try: out, err = self.call([\"java\", \"-classpath\", self.tool_path, \"net.grinder.Grinder\"]) if", "= line.strip() if not line.startswith('data.'): line_parts = line.split(' ') if", "Version 2.0 (the \"License\"); you may not use this file", "if self.stderr is not None: with open(self.stderr.name) as fds: contents", "build_source_code(self): self.log.debug(\"Generating Python script for Grinder\") self.root.append(self.gen_comment(\"This script was generated", "not use this file except in compliance with the License.", "in enumerate(self.lines): if not self.idx: if not line.startswith('data.'): self.__split(line) #", "Inc. 
Licensed under the Apache License, Version 2.0 (the \"License\");", "was generated by Taurus\", indent=0)) self.root.append(self.add_imports()) self.root.append(self.gen_new_line()) default_address = self.scenario.get(\"default-address\")", "you may not use this file except in compliance with", "if self.retcode != 0: raise ToolError(\"Gatling tool exited with non-zero", "line.startswith('data.'): self.__split(line) # to capture early test name records continue", "self.LOCAL_PATH) grinder_path = get_full_path(grinder_path) download_link = settings.get(\"download-link\", \"\") super(Grinder, self).__init__(tool_path=grinder_path,", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "duration = 0.001 self.log.debug(\"Log reading speed: %s lines/s\", (lnum +", "the License. You may obtain a copy of the License", "if not isinstance(req, HTTPRequest): msg = \"Grinder script generator doesn't", "\"\"\" if self.kpi_file: self.engine.existing_artifact(self.kpi_file) super(GrinderExecutor, self).post_process() def __scenario_from_requests(self): \"\"\" Generate", "= line_parts[5][:-1] test_name = ' '.join(line_parts[6:]) self.test_names[test_id] = test_name self.log.debug(\"Recognized", "you need them\\n\") fds.write(\"taurus.concurrency=%s\\n\" % load.concurrency) fds.write(\"taurus.throughput=%s\\n\" % load.throughput) fds.write(\"taurus.ramp_up=%s\\n\"", "fds.write(\"taurus.iterations=%s\\n\" % load.iterations) fds.write(\"# BZT Properies End\\n\") def prepare(self): self.stdout", "self.__scenario_from_requests() else: msg = \"There must be a script file", "props to fds :param fds: :param scenario: dict :return: \"\"\"", "= Test(1, \"%s\")' % self.label, indent=0)) self.root.append(self.gen_statement('test.record(request)', indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_statement(\"defaults", "None if len(data_fields) < max(self.idx.values()): return None, None return data_fields,", "\"%s%s\" % (self.partial_buffer, line) 
self.partial_buffer = \"\" line = line.strip()", "\"\"\" Grinder executor module \"\"\" def __init__(self): super(GrinderExecutor, self).__init__() self.script", "%s\" % self.retcode, self.get_error_diagnostics()) return True return False def shutdown(self):", "None: with open(self.stdout.name) as fds: contents = fds.read().strip() if contents.strip():", "line return None, None line = \"%s%s\" % (self.partial_buffer, line)", "def resource_files(self): resource_files = [] script_file_path = self.get_script_path() if script_file_path:", "= self.get_scenario().get(\"properties-file\") if prop_file: resource_files.append(prop_file) return resource_files def get_error_diagnostics(self): diagnostics", "(logback-worker.xml) self.env.add_path({\"CLASSPATH\": RESOURCES_DIR}, finish=True) self.env.add_path({\"CLASSPATH\": self.java_helper.tool_path}, finish=True) self.env.add_path({\"CLASSPATH\": self.settings.get(\"path\", None)},", "open(self.engine.create_artifact(\"grinder\", \".out\"), \"w\") self.stderr = open(self.engine.create_artifact(\"grinder\", \".err\"), \"w\") self.install_required_tools() scenario", "self.log.warning(msg, req.NAME) continue method = req.method.upper() url = req.url local_headers", "have ramp-down equal to rampup return self.widget def resource_files(self): resource_files", "+= self.get_load().ramp_up # because we have ramp-down equal to rampup", "it from maven and convert to JarTool(?) 
VERSION = \"3.11\"", "\"3.11\" LOCAL_PATH = \"~/.bzt/grinder-taurus/lib/grinder.jar\" def __init__(self, config=None, **kwargs): settings =", "self.test_names: label = self.test_names[test_id] else: label = \"Test #%s\" %", "{'Test', 'name', 'for', 'ID'}: test_id = line_parts[5][:-1] test_name = '", "1000.0 bytes_count = int(data_fields[self.idx[\"HTTP response length\"]].strip()) test_id = data_fields[self.idx[\"Test\"]].strip() thread_id", "last_pass=False): \"\"\" Generator method that returns next portion of data", "\"\"\" from net.grinder.script import Test from net.grinder.script.Grinder import grinder from", "indent=0)) headers = self.scenario.get_headers() if not self.scenario.get(\"keepalive\", True): headers['Connection'] =", "\"\"\" def __init__(self): super(GrinderExecutor, self).__init__() self.script = None self.exec_id =", "if base_props_file: fds.write(\"# Base Properies File Start: %s\\n\" % base_props_file)", "val)) fds.write(\"# Scenario Properies End\\n\\n\") def __write_bzt_props(self, fds): \"\"\" Write", "self.script: if \"requests\" in scenario: self.script = self.__scenario_from_requests() else: msg", "running - let's stop it. 
\"\"\" shutdown_process(self.process, self.log) if self.start_time:", "Generator method that returns next portion of data :param last_pass:", "doesn't support '%s' blocks, skipping\" self.log.warning(msg, req.NAME) continue method =", "%s\", test_id, test_name) return None, None worker_id = line[:line.find(' ')]", "scenario): \"\"\" Write scenario props and scenario file props to", "int(data_fields[self.idx[\"Time to first byte\"]]) / 1000.0 r_code = data_fields[self.idx[\"HTTP response", "law or agreed to in writing, software distributed under the", "fds.write(\"%s=%s\\n\" % (key, val)) fds.write(\"# Scenario Properies End\\n\\n\") def __write_bzt_props(self,", "url_arg, indent=0)) self.root.append(self.gen_statement('test = Test(1, \"%s\")' % self.label, indent=0)) self.root.append(self.gen_statement('test.record(request)',", "data file artifact \"\"\" if self.kpi_file: self.engine.existing_artifact(self.kpi_file) super(GrinderExecutor, self).post_process() def", "inc)\")) sleep_method.append(self.gen_statement(\"grinder.sleep(sleep_time, 0)\")) sleep_method.append(self.gen_statement(\"if sleep_time: grinder.logger.info('slept for %sms' % sleep_time)\"))", "self.test_names[test_id] else: label = \"Test #%s\" % test_id source_id =", "+= 1 url, error_msg = self.__parse_prev_lines(worker_id, lnum, r_code, bytes_count) if", "parent_logger): super(GrinderScriptBuilder, self).__init__(scenario, parent_logger) self.label = \"BZT Requests\" def build_source_code(self):", "None)) if global_timeout: self.root.append(self.gen_statement(\"defaults.setTimeout(%s)\" % int(global_timeout * 1000), indent=0)) cookie_flag", "next portion of data :param last_pass: \"\"\" self.log.debug(\"Reading grinder results...\")", "let's stop it. 
\"\"\" shutdown_process(self.process, self.log) if self.start_time: self.end_time =", "fds.read().strip() if contents.strip(): diagnostics.append(\"Grinder STDOUT:\\n\" + contents) return diagnostics class", "parent_logger): super(DataLogReader, self).__init__() self.report_by_url = False self.log = parent_logger.getChild(self.__class__.__name__) self.file", "= 0 self.test_names = {} self.known_threads = set() def _read(self,", "%s\\n\" % base_props_file) with open(base_props_file) as bpf: fds.write(bpf.read()) fds.write(\"# Base", "to fds :param fds: :param scenario: dict :return: \"\"\" script_props_file", "+= line return None, None line = \"%s%s\" % (self.partial_buffer,", "stop it. \"\"\" shutdown_process(self.process, self.log) if self.start_time: self.end_time = time.time()", "TaurusJavaHelper from bzt.requests_model import HTTPRequest from bzt.six import iteritems from", "1 elif set(line_parts[1:5]) == {'Test', 'name', 'for', 'ID'}: test_id =", "dest, 'grinder-' + self.version) os.remove(grinder_dist) self.log.info(\"Installed grinder successfully\") if not", "__init__(self): super(GrinderExecutor, self).__init__() self.script = None self.exec_id = \"grinder-bzt-%s\" %", "and base properties file contents to fds :param fds: fds", "url = '' error_msg = None for lineNo in reversed(range(max(lnum", "Write scenario props and scenario file props to fds :param", "ConsolidatingAggregator, ResultsReader from bzt.modules.console import WidgetProvider, ExecutorWidget from bzt.modules.java import", "url, error_msg = self.__parse_prev_lines(worker_id, lnum, r_code, bytes_count) if int(data_fields[self.idx[\"Errors\"]]) or", "in li_elements] default_link = self.DOWNLOAD_LINK.format(version=self.grinder_version) if default_link not in links:", "def get_widget(self): if not self.widget: if self.script is not None:", "self.version) os.remove(grinder_dist) self.log.info(\"Installed grinder successfully\") if not self.check_if_installed(): raise ToolError(\"Unable", "value in 
iteritems(headers): self.root.append(self.gen_statement(\"NVPair(%r, %r),\" % (header, value), indent=4)) self.root.append(self.gen_statement(\"])\",", "% self.script.replace(os.path.sep, \"/\")) fds.write(\"grinder.logDirectory=%s\\n\" % self.engine.artifacts_dir.replace(os.path.sep, \"/\")) load = self.get_load()", "load.throughput) fds.write(\"taurus.ramp_up=%s\\n\" % load.ramp_up) fds.write(\"taurus.steps=%s\\n\" % load.steps) fds.write(\"taurus.hold_for=%s\\n\" % load.hold)", "fds :param fds: :param scenario: dict :return: \"\"\" script_props_file =", "1000.0) r_time = int(data_fields[self.idx[\"Test time\"]]) / 1000.0 latency = int(data_fields[self.idx[\"Time", "% load.concurrency) if load.duration: fds.write(\"grinder.duration=%s\\n\" % int(load.duration * 1000)) fds.write(\"#", "%d', len(links)) return links class GrinderScriptBuilder(PythonGenerator): IMPORTS = \"\"\" from", "= int(data_fields[self.idx[\"Time to first byte\"]]) / 1000.0 r_code = data_fields[self.idx[\"HTTP", "class GrinderMirrorsManager(MirrorsManager): MIRRORS_SOURCE = \"https://sourceforge.net/settings/mirror_choices?projectname=grinder&filename=The%20Grinder\" \\ \"%203/{version}/grinder-{version}-binary.zip&dialog=true\" DOWNLOAD_LINK = \"https://downloads.sourceforge.net/project/grinder/The%20Grinder%203/{version}\"", "global_timeout: self.root.append(self.gen_statement(\"defaults.setTimeout(%s)\" % int(global_timeout * 1000), indent=0)) cookie_flag = int(self.scenario.get(\"store-cookie\",", "con_time = int(data_fields[self.idx[\"Time to resolve host\"]]) / 1000.0 con_time +=", "parse_line(self, data_fields, worker_id, lnum): worker_id = worker_id.split('.')[1] t_stamp = int(int(data_fields[self.idx[\"Start", "% load.throughput) fds.write(\"taurus.ramp_up=%s\\n\" % load.ramp_up) fds.write(\"taurus.steps=%s\\n\" % load.steps) fds.write(\"taurus.hold_for=%s\\n\" %", "links.append(default_link) self.log.debug('Total mirrors: %d', len(links)) return links class 
GrinderScriptBuilder(PythonGenerator): IMPORTS", "- self.start_time) def post_process(self): \"\"\" Collect data file artifact \"\"\"", "props local_props = scenario.get(\"properties\") if local_props: fds.write(\"# Scenario Properies Start\\n\")", "tool.install() def get_widget(self): if not self.widget: if self.script is not", "finish=True) self.cmd_line = [\"java\", \"net.grinder.Grinder\", self.properties_file] def startup(self): \"\"\" Should", "of data :param last_pass: \"\"\" self.log.debug(\"Reading grinder results...\") self.lines =", ":param fds: fds :return: \"\"\" base_props_file = self.settings.get(\"properties-file\") if base_props_file:", "script def install_required_tools(self): grinder = self._get_tool(Grinder, config=self.settings) self.settings[\"path\"] = grinder.tool_path", "= self.scenario.get(\"default-address\") url_arg = \"url=%r\" % default_address if default_address else", "STDOUT:\\n\" + contents) return diagnostics class DataLogReader(ResultsReader): \"\"\" Class to", "= worker_id.split('.')[1] t_stamp = int(int(data_fields[self.idx[\"Start time (ms since Epoch)\"]]) /", "self.tool_name, exc) return False def install(self): dest = get_full_path(self.tool_path, step_up=2)", "worker_id = worker_id.split('.')[1] t_stamp = int(int(data_fields[self.idx[\"Start time (ms since Epoch)\"]])", "(header, value), indent=4)) self.root.append(self.gen_statement(\"])\", indent=0)) global_timeout = dehumanize_time(self.scenario.get(\"timeout\", None)) if", "return diagnostics class DataLogReader(ResultsReader): \"\"\" Class to read KPI from", "stuff regarding Grinder tool usage Copyright 2015 BlazeMeter Inc. 
Licensed", "0 self.test_names = {} self.known_threads = set() def _read(self, last_pass=False):", "self.test_names = {} self.known_threads = set() def _read(self, last_pass=False): \"\"\"", "header_list = line.strip().split(self.DELIMITER) for _ix, field in enumerate(header_list): self.idx[field.strip()] =", "None: if self.retcode != 0: raise ToolError(\"Gatling tool exited with", "= req.headers params = \"[]\" headers = self.__list_to_nvpair_list(iteritems(local_headers)) main_method.append(self.gen_statement(\"request.%s(%r, %s,", "= dehumanize_time(req.priority_option('think-time')) if think_time: main_method.append(self.gen_statement(\"grinder.sleep(%s)\" % int(think_time * 1000))) runner_classdef.append(main_method)", "def prepare(self): self.stdout = open(self.engine.create_artifact(\"grinder\", \".out\"), \"w\") self.stderr = open(self.engine.create_artifact(\"grinder\",", "'finished': if self.concurrency > 0: self.concurrency -= 1 elif set(line_parts[1:5])", "+ data_fields[self.idx[\"Thread\"]].strip() if thread_id not in self.known_threads: self.known_threads.add(thread_id) self.concurrency +=", "is not None: with open(self.stdout.name) as fds: contents = fds.read().strip()", "self.kpi_file = os.path.join(self.engine.artifacts_dir, self.exec_id + \"-kpi.log\") self.reader = DataLogReader(self.kpi_file, self.log)", "iteritems(headers): self.root.append(self.gen_statement(\"NVPair(%r, %r),\" % (header, value), indent=4)) self.root.append(self.gen_statement(\"])\", indent=0)) global_timeout", "if local_props: fds.write(\"# Scenario Properies Start\\n\") for key, val in", "lines/s\", (lnum + 1) / duration) def parse_line(self, data_fields, worker_id,", "\"&use_mirror=autoselect\" def __init__(self, http_client, parent_logger, grinder_version): self.grinder_version = grinder_version base_link", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "= int(self.scenario.get(\"store-cookie\", True)) 
self.root.append(self.gen_statement(\"defaults.setUseCookies(%s)\" % cookie_flag, indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_runner_class()) @staticmethod", "test_name self.log.debug(\"Recognized test id %s => %s\", test_id, test_name) return", "fds.write(\"# Base Properies End\\n\\n\") def __write_scenario_props(self, fds, scenario): \"\"\" Write", "= self.gen_method_definition(\"rampUpSleeper\", [\"self\"]) sleep_method.append(self.gen_statement(\"if grinder.runNumber != 0: return\")) sleep_method.append(self.gen_statement(\"tprops =", "Base Properies File End: %s\\n\\n\" % base_props_file) # base props", "script_file_path = self.get_script_path() if script_file_path: resource_files.append(script_file_path) prop_file = self.get_scenario().get(\"properties-file\") if", "self.log.info(\"Unzipping %s\", grinder_dist) unzip(grinder_dist, dest, 'grinder-' + self.version) os.remove(grinder_dist) self.log.info(\"Installed", "all stuff regarding Grinder tool usage Copyright 2015 BlazeMeter Inc.", "self.tool_name, out) return True except CALL_PROBLEMS as exc: self.log.warning(\"%s check", "not None: duration = time.time() - start if duration <", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "in links: links.append(default_link) self.log.debug('Total mirrors: %d', len(links)) return links class", "False def shutdown(self): \"\"\" If tool is still running -", "data_fields[1].strip().isdigit(): return None, None if len(data_fields) < max(self.idx.values()): return None,", "File End: %s\\n\\n\" % script_props_file) # scenario props local_props =", "from data log \"\"\" DELIMITER = \",\" DETAILS_REGEX = re.compile(r\"worker\\.(\\S+)", "module \"\"\" def __init__(self): super(GrinderExecutor, self).__init__() self.script = None self.exec_id", "self.__write_scenario_props(fds, scenario) self.__write_bzt_props(fds) self.kpi_file = os.path.join(self.engine.artifacts_dir, self.exec_id + \"-kpi.log\") self.reader", "- let's 
stop it. \"\"\" shutdown_process(self.process, self.log) if self.start_time: self.end_time", "= \"~/.bzt/grinder-taurus/lib/grinder.jar\" def __init__(self, config=None, **kwargs): settings = config or", "self.engine.create_artifact(\"grinder\", \".properties\") with open(self.properties_file, 'w') as fds: self.__write_base_props(fds) self.__write_scenario_props(fds, scenario)", "bzt.utils import unzip, RequiredTool, JavaVM, shutdown_process, TclLibrary, FileReader, RESOURCES_DIR class", "\"\"\" Generator method that returns next portion of data :param", "Properies File Start: %s\\n\" % base_props_file) with open(base_props_file) as bpf:", "\"\"\" script_props_file = scenario.get(\"properties-file\") if script_props_file: fds.write(\"# Script Properies File", "for tool in required_tools: if not tool.check_if_installed(): tool.install() def get_widget(self):", "0: return\")) sleep_method.append(self.gen_statement(\"tprops = grinder.properties.getPropertySubset('taurus.')\")) sleep_method.append(self.gen_statement(\"inc = tprops.getDouble('ramp_up', 0)/tprops.getInt('concurrency', 1)\"))", "in iteritems(base_props): fds.write(\"%s=%s\\n\" % (key, val)) fds.write(\"# Base Properies End\\n\\n\")", "= \"Grinder script generator doesn't support '%s' blocks, skipping\" self.log.warning(msg,", "DataLogReader(self.kpi_file, self.log) self.reader.report_by_url = self.settings.get(\"report-by-url\", False) if isinstance(self.engine.aggregator, ConsolidatingAggregator): self.engine.aggregator.add_underling(self.reader)", "matched.group(2), matched.group(4) return url, error_msg class Grinder(RequiredTool): # todo: take", "__write_base_props(self, fds): \"\"\" write base properties and base properties file", "tool in required_tools: if not tool.check_if_installed(): tool.install() def get_widget(self): if", "except in compliance with the License. 
You may obtain a", "== matched.group(3) and str(bytes_count) == matched.group(5): return matched.group(2), matched.group(4) return", "error_msg = self.__parse_prev_lines(worker_id, lnum, r_code, bytes_count) if int(data_fields[self.idx[\"Errors\"]]) or int(data_fields[self.idx['HTTP", "int(1000 * grinder.threadNumber * inc)\")) sleep_method.append(self.gen_statement(\"grinder.sleep(sleep_time, 0)\")) sleep_method.append(self.gen_statement(\"if sleep_time: grinder.logger.info('slept", "\\ \"-binary.zip/download?use_mirror={mirror}\" li_search_pattern = re.compile(r'<li id=\".*?\">') li_elements = li_search_pattern.findall(self.page_source) if", "= int(1000 * grinder.threadNumber * inc)\")) sleep_method.append(self.gen_statement(\"grinder.sleep(sleep_time, 0)\")) sleep_method.append(self.gen_statement(\"if sleep_time:", "scenario props and scenario file props to fds :param fds:", "= \"\" line = line.strip() if not line.startswith('data.'): line_parts =", "return False def install(self): dest = get_full_path(self.tool_path, step_up=2) self.log.info(\"Will install", "language governing permissions and limitations under the License. \"\"\" import", "duration = time.time() - start if duration < 0.001: duration", "load values in case you need them\\n\") fds.write(\"taurus.concurrency=%s\\n\" % load.concurrency)", "self.start_time) def post_process(self): \"\"\" Collect data file artifact \"\"\" if", "+ \"&use_mirror=autoselect\" def __init__(self, http_client, parent_logger, grinder_version): self.grinder_version = grinder_version", "os.path.basename(self.script) else: label = None self.widget = ExecutorWidget(self, label) if", "* 1024, last_pass=last_pass)) lnum = None start = time.time() for", "0), lnum)): # looking max 100 lines back. 
TODO: parameterize?", "import unzip, RequiredTool, JavaVM, shutdown_process, TclLibrary, FileReader, RESOURCES_DIR class GrinderExecutor(ScenarioExecutor,", "= os.path.join(self.engine.artifacts_dir, self.exec_id + \"-kpi.log\") self.reader = DataLogReader(self.kpi_file, self.log) self.reader.report_by_url", "self.java_helper.tool_path}, finish=True) self.env.add_path({\"CLASSPATH\": self.settings.get(\"path\", None)}, finish=True) self.cmd_line = [\"java\", \"net.grinder.Grinder\",", "as possible. \"\"\" self.env.set({\"T_GRINDER_PREFIX\": self.exec_id}) self.process = self.execute(self.cmd_line) def check(self):", "Test(1, \"%s\")' % self.label, indent=0)) self.root.append(self.gen_statement('test.record(request)', indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_statement(\"defaults =", "lnum): worker_id = worker_id.split('.')[1] t_stamp = int(int(data_fields[self.idx[\"Start time (ms since", "'w') as fds: self.__write_base_props(fds) self.__write_scenario_props(fds, scenario) self.__write_bzt_props(fds) self.kpi_file = os.path.join(self.engine.artifacts_dir,", ":param scenario: dict :return: \"\"\" script_props_file = scenario.get(\"properties-file\") if script_props_file:", "self.log.debug(\"Grinder worked for %s seconds\", self.end_time - self.start_time) def post_process(self):", "= self.label builder.build_source_code() builder.save(script) return script def install_required_tools(self): grinder =", "msg = \"There must be a script file or requests", "not self.idx: if not line.startswith('data.'): self.__split(line) # to capture early", "possible. 
\"\"\" self.env.set({\"T_GRINDER_PREFIX\": self.exec_id}) self.process = self.execute(self.cmd_line) def check(self): \"\"\"", "[\"self\"]) sleep_method.append(self.gen_statement(\"if grinder.runNumber != 0: return\")) sleep_method.append(self.gen_statement(\"tprops = grinder.properties.getPropertySubset('taurus.')\")) sleep_method.append(self.gen_statement(\"inc", "line in enumerate(self.lines): if not self.idx: if not line.startswith('data.'): self.__split(line)", "if base_props: fds.write(\"# Base Properies Start\\n\") for key, val in", "% id(self) self.properties_file = None self.kpi_file = None self.cmd_line =", "= \"There must be a script file or requests for", "self.exec_id}) self.process = self.execute(self.cmd_line) def check(self): \"\"\" Checks if tool", "skipping\" self.log.warning(msg, req.NAME) continue method = req.method.upper() url = req.url", "props and scenario file props to fds :param fds: :param", ":raise TaurusToolError: \"\"\" self.retcode = self.process.poll() if self.retcode is not", "tool.check_if_installed(): tool.install() def get_widget(self): if not self.widget: if self.script is", "max 100 lines back. TODO: parameterize? 
line = self.lines[lineNo].strip() matched", "not matched: continue if worker_id == matched.group(1) and r_code ==", "parent_logger, grinder_version): self.grinder_version = grinder_version base_link = self.MIRRORS_SOURCE.format(version=self.grinder_version) super(GrinderMirrorsManager, self).__init__(http_client,", "Unless required by applicable law or agreed to in writing,", "Properies End\\n\") def prepare(self): self.stdout = open(self.engine.create_artifact(\"grinder\", \".out\"), \"w\") self.stderr", "is not None: if self.retcode != 0: raise ToolError(\"Gatling tool", "used by worker processes (logback-worker.xml) self.env.add_path({\"CLASSPATH\": RESOURCES_DIR}, finish=True) self.env.add_path({\"CLASSPATH\": self.java_helper.tool_path},", "base_props_file = self.settings.get(\"properties-file\") if base_props_file: fds.write(\"# Base Properies File Start:", "label = \"Test #%s\" % test_id source_id = '' #", "def __scenario_from_requests(self): \"\"\" Generate grinder scenario from requests :return: script", "tool exited with non-zero code: %s\" % self.retcode, self.get_error_diagnostics()) return", "id(self) self.properties_file = None self.kpi_file = None self.cmd_line = None", "None line = \"%s%s\" % (self.partial_buffer, line) self.partial_buffer = \"\"", "id %s => %s\", test_id, test_name) return None, None worker_id", "GrinderMirrorsManager(self.http_client, self.log, self.version) def check_if_installed(self): self.log.debug(\"Trying %s: %s\", self.tool_name, self.tool_path)", "express or implied. 
See the License for the specific language", "self.log) builder.label = self.label builder.build_source_code() builder.save(script) return script def install_required_tools(self):", "msg += \"to run Grinder tool (%s)\" % self.execution.get('scenario') raise", "self.env.add_path({\"CLASSPATH\": RESOURCES_DIR}, finish=True) self.env.add_path({\"CLASSPATH\": self.java_helper.tool_path}, finish=True) self.env.add_path({\"CLASSPATH\": self.settings.get(\"path\", None)}, finish=True)", "or load.concurrency: fds.write(\"grinder.runs=%s\\n\" % load.iterations or 0) if load.concurrency: fds.write(\"grinder.threads=%s\\n\"", "limitations under the License. \"\"\" import os import re import", "True return False def shutdown(self): \"\"\" If tool is still", "else: label = \"Test #%s\" % test_id source_id = ''", "self.known_threads = set() def _read(self, last_pass=False): \"\"\" Generator method that", "\"Java exception calling TestRunner\" else: error_msg = None # suppress", "= fds.read().strip() if contents.strip(): diagnostics.append(\"Grinder STDOUT:\\n\" + contents) if self.stderr", "< 0.001: duration = 0.001 self.log.debug(\"Log reading speed: %s lines/s\",", "int(data_fields[self.idx[\"Test time\"]]) / 1000.0 latency = int(data_fields[self.idx[\"Time to first byte\"]])", "install(self): dest = get_full_path(self.tool_path, step_up=2) self.log.info(\"Will install %s into %s\",", "try: out, err = self.call([\"java\", \"-classpath\", self.tool_path, \"net.grinder.Grinder\"]) if err:", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required", "in self.scenario.get_requests(): if not isinstance(req, HTTPRequest): msg = \"Grinder script", "= parent_logger.getChild(self.__class__.__name__) self.file = FileReader(filename=filename, parent_logger=self.log) self.idx = {} self.partial_buffer", "%s, %s)\" % (method, url, params, headers))) think_time = dehumanize_time(req.priority_option('think-time'))", "if r_code != '0': error_msg = \"HTTP %s\" 
% r_code", "not line.endswith(\"\\n\"): self.partial_buffer += line return None, None line =", "self.retcode != 0: raise ToolError(\"Gatling tool exited with non-zero code:", "looking max 100 lines back. TODO: parameterize? line = self.lines[lineNo].strip()", "self.grinder_version = grinder_version base_link = self.MIRRORS_SOURCE.format(version=self.grinder_version) super(GrinderMirrorsManager, self).__init__(http_client, base_link, parent_logger)", "CALL_PROBLEMS as exc: self.log.warning(\"%s check failed: %s\", self.tool_name, exc) return", "not self.scenario.get(\"keepalive\", True): headers['Connection'] = 'close' if headers: self.root.append(self.gen_statement(\"defaults.setDefaultHeaders([\", indent=0))", "from bzt.six import iteritems from bzt.utils import MirrorsManager, dehumanize_time, get_full_path,", "scenario) self.__write_bzt_props(fds) self.kpi_file = os.path.join(self.engine.artifacts_dir, self.exec_id + \"-kpi.log\") self.reader =", "BZT Properies End\\n\") def prepare(self): self.stdout = open(self.engine.create_artifact(\"grinder\", \".out\"), \"w\")", "resulting logs contains any data and throws exception otherwise. 
:return:", "self.properties_file = self.engine.create_artifact(\"grinder\", \".properties\") with open(self.properties_file, 'w') as fds: self.__write_base_props(fds)", "from HTTPClient import NVPair \"\"\" def __init__(self, scenario, parent_logger): super(GrinderScriptBuilder,", "with open(self.stderr.name) as fds: contents = fds.read().strip() if contents.strip(): diagnostics.append(\"Grinder", "% test_id source_id = '' # maybe use worker_id somehow?", "for Grinder\") self.root.append(self.gen_comment(\"This script was generated by Taurus\", indent=0)) self.root.append(self.add_imports())", "unzip, RequiredTool, JavaVM, shutdown_process, TclLibrary, FileReader, RESOURCES_DIR class GrinderExecutor(ScenarioExecutor, WidgetProvider,", "= \"3.11\" LOCAL_PATH = \"~/.bzt/grinder-taurus/lib/grinder.jar\" def __init__(self, config=None, **kwargs): settings", "self.label = \"BZT Requests\" def build_source_code(self): self.log.debug(\"Generating Python script for", "0.001: duration = 0.001 self.log.debug(\"Log reading speed: %s lines/s\", (lnum", "self.concurrency > 0: self.concurrency -= 1 elif set(line_parts[1:5]) == {'Test',", "sleep_method.append(self.gen_statement(\"grinder.sleep(sleep_time, 0)\")) sleep_method.append(self.gen_statement(\"if sleep_time: grinder.logger.info('slept for %sms' % sleep_time)\")) sleep_method.append(self.gen_statement(\"else:", "self.process = self.execute(self.cmd_line) def check(self): \"\"\" Checks if tool is", "\"\"\" import os import re import time from bzt import", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "usage Copyright 2015 BlazeMeter Inc. 
Licensed under the Apache License,", "+= int(data_fields[self.idx[\"Time to establish connection\"]]) / 1000.0 bytes_count = int(data_fields[self.idx[\"HTTP", "the specific language governing permissions and limitations under the License.", "\"\"\" self.log.debug(\"Reading grinder results...\") self.lines = list(self.file.get_lines(size=1024 * 1024, last_pass=last_pass))", "fds: self.__write_base_props(fds) self.__write_scenario_props(fds, scenario) self.__write_bzt_props(fds) self.kpi_file = os.path.join(self.engine.artifacts_dir, self.exec_id +", "installation!\" % self.tool_name) class GrinderMirrorsManager(MirrorsManager): MIRRORS_SOURCE = \"https://sourceforge.net/settings/mirror_choices?projectname=grinder&filename=The%20Grinder\" \\ \"%203/{version}/grinder-{version}-binary.zip&dialog=true\"", "'):] data_fields = line.split(self.DELIMITER) if not data_fields[1].strip().isdigit(): return None, None", "self.properties_file = None self.kpi_file = None self.cmd_line = None self.process", "+ self.version) os.remove(grinder_dist) self.log.info(\"Installed grinder successfully\") if not self.check_if_installed(): raise", "bzt.modules.console import WidgetProvider, ExecutorWidget from bzt.modules.java import TaurusJavaHelper from bzt.requests_model", "failed: %s\", self.tool_name, exc) return False def install(self): dest =", "indent=0)) for header, value in iteritems(headers): self.root.append(self.gen_statement(\"NVPair(%r, %r),\" % (header,", "return None, None line = \"%s%s\" % (self.partial_buffer, line) self.partial_buffer", "to fds :param fds: :return: \"\"\" fds.write(\"# BZT Properies Start\\n\")", "or agreed to in writing, software distributed under the License", "step_up=2) self.log.info(\"Will install %s into %s\", self.tool_name, dest) grinder_dist =", "not None: self.log.debug('Parsing mirrors...') base_link = \"http://sourceforge.net/projects/grinder/files/The%20Grinder%203/{version}/grinder-{version}\" \\ \"-binary.zip/download?use_mirror={mirror}\" 
li_search_pattern", "scenario, parent_logger): super(GrinderScriptBuilder, self).__init__(scenario, parent_logger) self.label = \"BZT Requests\" def", "= self.execute(self.cmd_line) def check(self): \"\"\" Checks if tool is still", "self.__write_base_props(fds) self.__write_scenario_props(fds, scenario) self.__write_bzt_props(fds) self.kpi_file = os.path.join(self.engine.artifacts_dir, self.exec_id + \"-kpi.log\")", "mirrors: %d', len(links)) return links class GrinderScriptBuilder(PythonGenerator): IMPORTS = \"\"\"", "+ \",\".join(\"NVPair(%r, %r)\" % (header, value) for header, value in", "Properies File End: %s\\n\\n\" % base_props_file) # base props base_props", "KPI from data log \"\"\" DELIMITER = \",\" DETAILS_REGEX =", "+= \"to run Grinder tool (%s)\" % self.execution.get('scenario') raise TaurusConfigError(msg)", "+ str(int(time.time())) + \"&use_mirror=autoselect\" def __init__(self, http_client, parent_logger, grinder_version): self.grinder_version", "def get_error_diagnostics(self): diagnostics = [] if self.stdout is not None:", "under the License. \"\"\" import os import re import time", "NVPair \"\"\" def __init__(self, scenario, parent_logger): super(GrinderScriptBuilder, self).__init__(scenario, parent_logger) self.label", "self.exec_id = \"grinder-bzt-%s\" % id(self) self.properties_file = None self.kpi_file =", "Copyright 2015 BlazeMeter Inc. 
Licensed under the Apache License, Version", "ConsolidatingAggregator): self.engine.aggregator.add_underling(self.reader) # add logback configurations used by worker processes", "indent=4)) self.root.append(self.gen_statement(\"])\", indent=0)) global_timeout = dehumanize_time(self.scenario.get(\"timeout\", None)) if global_timeout: self.root.append(self.gen_statement(\"defaults.setTimeout(%s)\"", "= None self.widget = ExecutorWidget(self, label) if self.get_load().ramp_up: self.widget.duration +=", "prop_file: resource_files.append(prop_file) return resource_files def get_error_diagnostics(self): diagnostics = [] if", "import MirrorsManager, dehumanize_time, get_full_path, PythonGenerator, CALL_PROBLEMS from bzt.utils import unzip,", "a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", "not line.startswith('data.'): self.__split(line) # to capture early test name records", "End\\n\") def prepare(self): self.stdout = open(self.engine.create_artifact(\"grinder\", \".out\"), \"w\") self.stderr =", "self.lines = list(self.file.get_lines(size=1024 * 1024, last_pass=last_pass)) lnum = None start", "__scenario_from_requests(self): \"\"\" Generate grinder scenario from requests :return: script \"\"\"", "script_props_file) # scenario props local_props = scenario.get(\"properties\") if local_props: fds.write(\"#", "if len(data_fields) < max(self.idx.values()): return None, None return data_fields, worker_id", "self.root.append(self.add_imports()) self.root.append(self.gen_new_line()) default_address = self.scenario.get(\"default-address\") url_arg = \"url=%r\" % default_address", "False def install(self): dest = get_full_path(self.tool_path, step_up=2) self.log.info(\"Will install %s", "== {'Test', 'name', 'for', 'ID'}: test_id = line_parts[5][:-1] test_name =", "%sms' % sleep_time)\")) sleep_method.append(self.gen_statement(\"else: grinder.logger.info('No sleep needed')\")) sleep_method.append(self.gen_new_line()) 
runner_classdef.append(sleep_method) main_method", "= grinder.tool_path self.java_helper = self._get_tool(TaurusJavaHelper) required_tools = [self._get_tool(TclLibrary), self._get_tool(JavaVM), self.java_helper,", "Scenario Properies Start\\n\") for key, val in iteritems(local_props): fds.write(\"%s=%s\\n\" %", ":return: \"\"\" fds.write(\"# BZT Properies Start\\n\") fds.write(\"grinder.hostID=%s\\n\" % self.exec_id) fds.write(\"grinder.script=%s\\n\"", "line = self.lines[lineNo].strip() matched = self.DETAILS_REGEX.match(line) if not matched: continue", "line) self.partial_buffer = \"\" line = line.strip() if not line.startswith('data.'):", "finish=True) self.env.add_path({\"CLASSPATH\": self.settings.get(\"path\", None)}, finish=True) self.cmd_line = [\"java\", \"net.grinder.Grinder\", self.properties_file]", "bzt import TaurusConfigError, ToolError from bzt.engine import ScenarioExecutor, FileLister, HavingInstallableTools,", "to JarTool(?) VERSION = \"3.11\" LOCAL_PATH = \"~/.bzt/grinder-taurus/lib/grinder.jar\" def __init__(self,", "use worker_id somehow? 
return t_stamp, label, self.concurrency, r_time, con_time, latency,", "'for', 'ID'}: test_id = line_parts[5][:-1] test_name = ' '.join(line_parts[6:]) self.test_names[test_id]", "= scenario.get(\"properties-file\") if script_props_file: fds.write(\"# Script Properies File Start: %s\\n\"", "r_code else: error_msg = \"Java exception calling TestRunner\" else: error_msg", "!= 0: return\")) sleep_method.append(self.gen_statement(\"tprops = grinder.properties.getPropertySubset('taurus.')\")) sleep_method.append(self.gen_statement(\"inc = tprops.getDouble('ramp_up', 0)/tprops.getInt('concurrency',", ":return: script \"\"\" script = self.engine.create_artifact(\"grinder_requests\", \".py\") builder = GrinderScriptBuilder(self.get_scenario(),", "\"\" self.root.append(self.gen_statement('request = HTTPRequest(%s)' % url_arg, indent=0)) self.root.append(self.gen_statement('test = Test(1,", "0)\")) sleep_method.append(self.gen_statement(\"if sleep_time: grinder.logger.info('slept for %sms' % sleep_time)\")) sleep_method.append(self.gen_statement(\"else: grinder.logger.info('No", "main_method = self.gen_method_definition(\"__call__\", [\"self\"]) main_method.append(self.gen_statement(\"self.rampUpSleeper()\")) for req in self.scenario.get_requests(): if", "self.settings.get(\"properties-file\") if base_props_file: fds.write(\"# Base Properies File Start: %s\\n\" %", ":param fds: :param scenario: dict :return: \"\"\" script_props_file = scenario.get(\"properties-file\")", "self.settings.get(\"path\", None)}, finish=True) self.cmd_line = [\"java\", \"net.grinder.Grinder\", self.properties_file] def startup(self):", "Grinder tool (%s)\" % self.execution.get('scenario') raise TaurusConfigError(msg) self.properties_file = self.engine.create_artifact(\"grinder\",", "self.root.append(self.gen_new_line()) self.root.append(self.gen_statement(\"defaults = HTTPPluginControl.getConnectionDefaults()\", indent=0)) self.root.append(self.gen_statement(\"utilities = 
HTTPPluginControl.getHTTPUtilities()\", indent=0)) headers", "> 1: if line_parts[1] == 'starting,': # self.concurrency += 1", "resource_files(self): resource_files = [] script_file_path = self.get_script_path() if script_file_path: resource_files.append(script_file_path)", "config or {} grinder_path = settings.get(\"path\", self.LOCAL_PATH) grinder_path = get_full_path(grinder_path)", "self.settings.get(\"properties\") if base_props: fds.write(\"# Base Properies Start\\n\") for key, val", "suppress errors if self.report_by_url: label = url elif test_id in", "fds.write(\"# BZT Properies End\\n\") def prepare(self): self.stdout = open(self.engine.create_artifact(\"grinder\", \".out\"),", "write base properties and base properties file contents to fds", "or requests for its generation \" msg += \"to run", "key, val in iteritems(local_props): fds.write(\"%s=%s\\n\" % (key, val)) fds.write(\"# Scenario", "config=None, **kwargs): settings = config or {} grinder_path = settings.get(\"path\",", "errors']]): if not error_msg: if r_code != '0': error_msg =", "% self.execution.get('scenario') raise TaurusConfigError(msg) self.properties_file = self.engine.create_artifact(\"grinder\", \".properties\") with open(self.properties_file,", "self.__split(line) # to capture early test name records continue line", "return t_stamp, label, self.concurrency, r_time, con_time, latency, r_code, error_msg, source_id,", "test_id = data_fields[self.idx[\"Test\"]].strip() thread_id = worker_id + '/' + data_fields[self.idx[\"Thread\"]].strip()", "if not self.check_if_installed(): raise ToolError(\"Unable to run %s after installation!\"", "default_address = self.scenario.get(\"default-address\") url_arg = \"url=%r\" % default_address if default_address", "for header, value in iteritems(headers): self.root.append(self.gen_statement(\"NVPair(%r, %r),\" % (header, value),", "> 0: self.concurrency -= 1 elif set(line_parts[1:5]) == {'Test', 'name',", "the License at 
http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "\"\"\" self.retcode = self.process.poll() if self.retcode is not None: if", "default_link not in links: links.append(default_link) self.log.debug('Total mirrors: %d', len(links)) return", "from net.grinder.plugin.http import HTTPRequest, HTTPPluginControl, HTTPUtilities from HTTPClient import NVPair", "= {} self.known_threads = set() def _read(self, last_pass=False): \"\"\" Generator", "http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "required_tools: if not tool.check_if_installed(): tool.install() def get_widget(self): if not self.widget:", "# to capture early test name records continue line =", "'ID'}: test_id = line_parts[5][:-1] test_name = ' '.join(line_parts[6:]) self.test_names[test_id] =", "successfully\") if not self.check_if_installed(): raise ToolError(\"Unable to run %s after", "lnum is not None: duration = time.time() - start if", "= \"http://sourceforge.net/projects/grinder/files/The%20Grinder%203/{version}/grinder-{version}\" \\ \"-binary.zip/download?use_mirror={mirror}\" li_search_pattern = re.compile(r'<li id=\".*?\">') li_elements =", "self.stderr is not None: with open(self.stderr.name) as fds: contents =", "self.process = None self.end_time = None self.retcode = None self.java_helper", "return data_fields, worker_id def __parse_prev_lines(self, worker_id, lnum, r_code, bytes_count): url", ":return: bool :raise TaurusToolError: \"\"\" self.retcode = self.process.poll() if self.retcode", "the tool as fast as possible. \"\"\" self.env.set({\"T_GRINDER_PREFIX\": self.exec_id}) self.process", "tool is still running. 
Also checks if resulting logs contains", "if not matched: continue if worker_id == matched.group(1) and r_code", "STDOUT:\\n\" + contents) if self.stderr is not None: with open(self.stderr.name)", "Properies End\\n\\n\") def __write_bzt_props(self, fds): \"\"\" Write bzt properties to", "self.root.append(self.gen_statement('request = HTTPRequest(%s)' % url_arg, indent=0)) self.root.append(self.gen_statement('test = Test(1, \"%s\")'", "self.root.append(self.gen_statement(\"defaults.setDefaultHeaders([\", indent=0)) for header, value in iteritems(headers): self.root.append(self.gen_statement(\"NVPair(%r, %r),\" %", "and convert to JarTool(?) VERSION = \"3.11\" LOCAL_PATH = \"~/.bzt/grinder-taurus/lib/grinder.jar\"", "fds.write(\"# Base Properies File End: %s\\n\\n\" % base_props_file) # base", "add logback configurations used by worker processes (logback-worker.xml) self.env.add_path({\"CLASSPATH\": RESOURCES_DIR},", "not data_fields: self.log.debug(\"Skipping line: %s\", line.strip()) continue yield self.parse_line(data_fields, worker_id,", "0 self.concurrency = 0 self.test_names = {} self.known_threads = set()", "__parse_prev_lines(self, worker_id, lnum, r_code, bytes_count): url = '' error_msg =", "bytes_count def __split(self, line): if not line.endswith(\"\\n\"): self.partial_buffer += line", "iteritems from bzt.utils import MirrorsManager, dehumanize_time, get_full_path, PythonGenerator, CALL_PROBLEMS from", "run %s after installation!\" % self.tool_name) class GrinderMirrorsManager(MirrorsManager): MIRRORS_SOURCE =", "base properties file contents to fds :param fds: fds :return:", "ResultsReader from bzt.modules.console import WidgetProvider, ExecutorWidget from bzt.modules.java import TaurusJavaHelper", "Start\\n\") fds.write(\"grinder.hostID=%s\\n\" % self.exec_id) fds.write(\"grinder.script=%s\\n\" % self.script.replace(os.path.sep, \"/\")) fds.write(\"grinder.logDirectory=%s\\n\" %", "values in case you need them\\n\") 
fds.write(\"taurus.concurrency=%s\\n\" % load.concurrency) fds.write(\"taurus.throughput=%s\\n\"", "checks if resulting logs contains any data and throws exception", "= [\"java\", \"net.grinder.Grinder\", self.properties_file] def startup(self): \"\"\" Should start the", "= req.url local_headers = req.headers params = \"[]\" headers =", "= self.__list_to_nvpair_list(iteritems(local_headers)) main_method.append(self.gen_statement(\"request.%s(%r, %s, %s)\" % (method, url, params, headers)))", "= re.compile(r'<li id=\".*?\">') li_elements = li_search_pattern.findall(self.page_source) if li_elements: links =", "self.__parse_prev_lines(worker_id, lnum, r_code, bytes_count) if int(data_fields[self.idx[\"Errors\"]]) or int(data_fields[self.idx['HTTP response errors']]):", "if lnum is not None: duration = time.time() - start", "= fds.read().strip() if contents.strip(): diagnostics.append(\"Grinder STDOUT:\\n\" + contents) return diagnostics", "self.tool_path, \"net.grinder.Grinder\"]) if err: out += err self.log.debug(\"%s stdout: %s\",", "filename, parent_logger): super(DataLogReader, self).__init__() self.report_by_url = False self.log = parent_logger.getChild(self.__class__.__name__)", "0: raise ToolError(\"Gatling tool exited with non-zero code: %s\" %", "tool as fast as possible. 
\"\"\" self.env.set({\"T_GRINDER_PREFIX\": self.exec_id}) self.process =", "if self.kpi_file: self.engine.existing_artifact(self.kpi_file) super(GrinderExecutor, self).post_process() def __scenario_from_requests(self): \"\"\" Generate grinder", "prepare(self): self.stdout = open(self.engine.create_artifact(\"grinder\", \".out\"), \"w\") self.stderr = open(self.engine.create_artifact(\"grinder\", \".err\"),", "not isinstance(req, HTTPRequest): msg = \"Grinder script generator doesn't support", "run Grinder tool (%s)\" % self.execution.get('scenario') raise TaurusConfigError(msg) self.properties_file =", "len(links)) return links class GrinderScriptBuilder(PythonGenerator): IMPORTS = \"\"\" from net.grinder.script", "\"\"\" Class to read KPI from data log \"\"\" DELIMITER", "self.root.append(self.gen_statement(\"NVPair(%r, %r),\" % (header, value), indent=4)) self.root.append(self.gen_statement(\"])\", indent=0)) global_timeout =", "fds.write(spf.read()) fds.write(\"# Script Properies File End: %s\\n\\n\" % script_props_file) #", "')] line = line[line.find(' '):] data_fields = line.split(self.DELIMITER) if not", "permissions and limitations under the License. 
\"\"\" import os import", "under the Apache License, Version 2.0 (the \"License\"); you may", "= settings.get(\"path\", self.LOCAL_PATH) grinder_path = get_full_path(grinder_path) download_link = settings.get(\"download-link\", \"\")", "[self._get_tool(TclLibrary), self._get_tool(JavaVM), self.java_helper, grinder] for tool in required_tools: if not", "requests for its generation \" msg += \"to run Grinder", "grinder.runNumber != 0: return\")) sleep_method.append(self.gen_statement(\"tprops = grinder.properties.getPropertySubset('taurus.')\")) sleep_method.append(self.gen_statement(\"inc = tprops.getDouble('ramp_up',", "= test_name self.log.debug(\"Recognized test id %s => %s\", test_id, test_name)", "max(self.idx.values()): return None, None return data_fields, worker_id def __parse_prev_lines(self, worker_id,", "url_arg = \"url=%r\" % default_address if default_address else \"\" self.root.append(self.gen_statement('request", "self.root.append(self.gen_statement(\"defaults = HTTPPluginControl.getConnectionDefaults()\", indent=0)) self.root.append(self.gen_statement(\"utilities = HTTPPluginControl.getHTTPUtilities()\", indent=0)) headers =", "bzt.engine import ScenarioExecutor, FileLister, HavingInstallableTools, SelfDiagnosable from bzt.modules.aggregator import ConsolidatingAggregator,", "if think_time: main_method.append(self.gen_statement(\"grinder.sleep(%s)\" % int(think_time * 1000))) runner_classdef.append(main_method) return runner_classdef", "lnum, r_code, bytes_count) if int(data_fields[self.idx[\"Errors\"]]) or int(data_fields[self.idx['HTTP response errors']]): if", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "response length\"]].strip()) test_id = data_fields[self.idx[\"Test\"]].strip() thread_id = worker_id + '/'", "fds: fds :return: \"\"\" base_props_file = self.settings.get(\"properties-file\") if base_props_file: fds.write(\"#", "enumerate(header_list): self.idx[field.strip()] = _ix data_fields, worker_id = 
self.__split(line) if not", "url, error_msg class Grinder(RequiredTool): # todo: take it from maven", "self.engine.existing_artifact(self.kpi_file) super(GrinderExecutor, self).post_process() def __scenario_from_requests(self): \"\"\" Generate grinder scenario from", "HTTPRequest, HTTPPluginControl, HTTPUtilities from HTTPClient import NVPair \"\"\" def __init__(self,", "\"\"\" Checks if tool is still running. Also checks if", "% load.steps) fds.write(\"taurus.hold_for=%s\\n\" % load.hold) fds.write(\"taurus.iterations=%s\\n\" % load.iterations) fds.write(\"# BZT", "matched.group(3) and str(bytes_count) == matched.group(5): return matched.group(2), matched.group(4) return url,", "in writing, software distributed under the License is distributed on", "support '%s' blocks, skipping\" self.log.warning(msg, req.NAME) continue method = req.method.upper()", "contents) return diagnostics class DataLogReader(ResultsReader): \"\"\" Class to read KPI", "required by applicable law or agreed to in writing, software", "def post_process(self): \"\"\" Collect data file artifact \"\"\" if self.kpi_file:", "resource_files def get_error_diagnostics(self): diagnostics = [] if self.stdout is not", "self.script is not None: label = \"Grinder: %s\" % os.path.basename(self.script)", "sleep_method.append(self.gen_statement(\"inc = tprops.getDouble('ramp_up', 0)/tprops.getInt('concurrency', 1)\")) sleep_method.append(self.gen_statement(\"sleep_time = int(1000 * grinder.threadNumber", "tprops.getDouble('ramp_up', 0)/tprops.getInt('concurrency', 1)\")) sleep_method.append(self.gen_statement(\"sleep_time = int(1000 * grinder.threadNumber * inc)\"))", "ExecutorWidget(self, label) if self.get_load().ramp_up: self.widget.duration += self.get_load().ramp_up # because we", "is not None: label = \"Grinder: %s\" % os.path.basename(self.script) else:", "records continue line = line[line.find(' '):] header_list = line.strip().split(self.DELIMITER) for", "load = self.get_load() if load.iterations or 
load.concurrency: fds.write(\"grinder.runs=%s\\n\" % load.iterations", "== matched.group(5): return matched.group(2), matched.group(4) return url, error_msg class Grinder(RequiredTool):", "get_full_path(grinder_path) download_link = settings.get(\"download-link\", \"\") super(Grinder, self).__init__(tool_path=grinder_path, download_link=download_link, **kwargs) self.version", "(\\S+) (.+), (\\d+) bytes\") def __init__(self, filename, parent_logger): super(DataLogReader, self).__init__()", "\"\") super(Grinder, self).__init__(tool_path=grinder_path, download_link=download_link, **kwargs) self.version = self.VERSION self.mirror_manager =", "1000), indent=0)) cookie_flag = int(self.scenario.get(\"store-cookie\", True)) self.root.append(self.gen_statement(\"defaults.setUseCookies(%s)\" % cookie_flag, indent=0))", "1)\")) sleep_method.append(self.gen_statement(\"sleep_time = int(1000 * grinder.threadNumber * inc)\")) sleep_method.append(self.gen_statement(\"grinder.sleep(sleep_time, 0)\"))", "'' error_msg = None for lineNo in reversed(range(max(lnum - 100,", "if self.retcode is not None: if self.retcode != 0: raise", "1) / duration) def parse_line(self, data_fields, worker_id, lnum): worker_id =", "code: %s\" % self.retcode, self.get_error_diagnostics()) return True return False def", "distributed under the License is distributed on an \"AS IS\"", "int(data_fields[self.idx[\"Time to resolve host\"]]) / 1000.0 con_time += int(data_fields[self.idx[\"Time to", "\"-kpi.log\") self.reader = DataLogReader(self.kpi_file, self.log) self.reader.report_by_url = self.settings.get(\"report-by-url\", False) if", "CONDITIONS OF ANY KIND, either express or implied. 
See the", "from net.grinder.script.Grinder import grinder from net.grinder.plugin.http import HTTPRequest, HTTPPluginControl, HTTPUtilities", "self.label self.script = self.get_script_path() if not self.script: if \"requests\" in", "self.end_time = 0 self.concurrency = 0 self.test_names = {} self.known_threads", "= None # suppress errors if self.report_by_url: label = url", "list(self.file.get_lines(size=1024 * 1024, last_pass=last_pass)) lnum = None start = time.time()", "convert to JarTool(?) VERSION = \"3.11\" LOCAL_PATH = \"~/.bzt/grinder-taurus/lib/grinder.jar\" def", "False) if isinstance(self.engine.aggregator, ConsolidatingAggregator): self.engine.aggregator.add_underling(self.reader) # add logback configurations used", "dehumanize_time, get_full_path, PythonGenerator, CALL_PROBLEMS from bzt.utils import unzip, RequiredTool, JavaVM,", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable", "= self.DOWNLOAD_LINK.format(version=self.grinder_version) if default_link not in links: links.append(default_link) self.log.debug('Total mirrors:", "as spf: fds.write(spf.read()) fds.write(\"# Script Properies File End: %s\\n\\n\" %", "Properies Start\\n\") fds.write(\"grinder.hostID=%s\\n\" % self.exec_id) fds.write(\"grinder.script=%s\\n\" % self.script.replace(os.path.sep, \"/\")) fds.write(\"grinder.logDirectory=%s\\n\"", "MIRRORS_SOURCE = \"https://sourceforge.net/settings/mirror_choices?projectname=grinder&filename=The%20Grinder\" \\ \"%203/{version}/grinder-{version}-binary.zip&dialog=true\" DOWNLOAD_LINK = \"https://downloads.sourceforge.net/project/grinder/The%20Grinder%203/{version}\" \\ \"/grinder-{version}-binary.zip?r=&ts=\"", "HTTPRequest from bzt.six import iteritems from bzt.utils import MirrorsManager, dehumanize_time,", "Start\\n\") for key, val in iteritems(local_props): fds.write(\"%s=%s\\n\" % (key, val))", "def __init__(self): super(GrinderExecutor, self).__init__() self.script = None self.exec_id = 
\"grinder-bzt-%s\"", "check failed: %s\", self.tool_name, exc) return False def install(self): dest", "import re import time from bzt import TaurusConfigError, ToolError from", "as bpf: fds.write(bpf.read()) fds.write(\"# Base Properies File End: %s\\n\\n\" %", "self.log.info(\"Installed grinder successfully\") if not self.check_if_installed(): raise ToolError(\"Unable to run", "return True return False def shutdown(self): \"\"\" If tool is", "% self.label, indent=0)) self.root.append(self.gen_statement('test.record(request)', indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_statement(\"defaults = HTTPPluginControl.getConnectionDefaults()\", indent=0))", "'%s' blocks, skipping\" self.log.warning(msg, req.NAME) continue method = req.method.upper() url", "= None self.end_time = None self.retcode = None self.java_helper =", "bzt properties to fds :param fds: :return: \"\"\" fds.write(\"# BZT", "{} self.known_threads = set() def _read(self, last_pass=False): \"\"\" Generator method", "= None self.cmd_line = None self.process = None self.end_time =", "Base Properies End\\n\\n\") def __write_scenario_props(self, fds, scenario): \"\"\" Write scenario", "script generator doesn't support '%s' blocks, skipping\" self.log.warning(msg, req.NAME) continue", "Properies File End: %s\\n\\n\" % script_props_file) # scenario props local_props", "seconds\", self.end_time - self.start_time) def post_process(self): \"\"\" Collect data file", "Script Properies File Start: %s\\n\" % script_props_file) with open(script_props_file) as", "from bzt.requests_model import HTTPRequest from bzt.six import iteritems from bzt.utils", "out += err self.log.debug(\"%s stdout: %s\", self.tool_name, out) return True", "indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_runner_class()) @staticmethod def __list_to_nvpair_list(items): return \"[\" + \",\".join(\"NVPair(%r,", "self._download(use_link=bool(self.download_link)) self.log.info(\"Unzipping %s\", 
grinder_dist) unzip(grinder_dist, dest, 'grinder-' + self.version) os.remove(grinder_dist)", "spf: fds.write(spf.read()) fds.write(\"# Script Properies File End: %s\\n\\n\" % script_props_file)", "tool is still running - let's stop it. \"\"\" shutdown_process(self.process,", "test_id source_id = '' # maybe use worker_id somehow? return", "parent_logger.getChild(self.__class__.__name__) self.file = FileReader(filename=filename, parent_logger=self.log) self.idx = {} self.partial_buffer =", "End: %s\\n\\n\" % script_props_file) # scenario props local_props = scenario.get(\"properties\")", "not None: with open(self.stdout.name) as fds: contents = fds.read().strip() if", "req.url local_headers = req.headers params = \"[]\" headers = self.__list_to_nvpair_list(iteritems(local_headers))", "%s\", grinder_dist) unzip(grinder_dist, dest, 'grinder-' + self.version) os.remove(grinder_dist) self.log.info(\"Installed grinder", "worker_id + '/' + data_fields[self.idx[\"Thread\"]].strip() if thread_id not in self.known_threads:", "%s into %s\", self.tool_name, dest) grinder_dist = self._download(use_link=bool(self.download_link)) self.log.info(\"Unzipping %s\",", "base_link, parent_logger) def _parse_mirrors(self): links = [] if self.page_source is", "worker_id.split('.')[1] t_stamp = int(int(data_fields[self.idx[\"Start time (ms since Epoch)\"]]) / 1000.0)", "\"Test #%s\" % test_id source_id = '' # maybe use", "thread_id = worker_id + '/' + data_fields[self.idx[\"Thread\"]].strip() if thread_id not", "err: out += err self.log.debug(\"%s stdout: %s\", self.tool_name, out) return", "\\ \"%203/{version}/grinder-{version}-binary.zip&dialog=true\" DOWNLOAD_LINK = \"https://downloads.sourceforge.net/project/grinder/The%20Grinder%203/{version}\" \\ \"/grinder-{version}-binary.zip?r=&ts=\" + str(int(time.time())) +", "= None self.retcode = None self.java_helper = None def __write_base_props(self,", "self.engine.create_artifact(\"grinder_requests\", \".py\") builder = 
GrinderScriptBuilder(self.get_scenario(), self.log) builder.label = self.label builder.build_source_code()", "1 url, error_msg = self.__parse_prev_lines(worker_id, lnum, r_code, bytes_count) if int(data_fields[self.idx[\"Errors\"]])", "= HTTPPluginControl.getConnectionDefaults()\", indent=0)) self.root.append(self.gen_statement(\"utilities = HTTPPluginControl.getHTTPUtilities()\", indent=0)) headers = self.scenario.get_headers()", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "self.log.info(\"Will install %s into %s\", self.tool_name, dest) grinder_dist = self._download(use_link=bool(self.download_link))", "GrinderScriptBuilder(PythonGenerator): IMPORTS = \"\"\" from net.grinder.script import Test from net.grinder.script.Grinder", "fds :param fds: fds :return: \"\"\" base_props_file = self.settings.get(\"properties-file\") if", "self.env.add_path({\"CLASSPATH\": self.java_helper.tool_path}, finish=True) self.env.add_path({\"CLASSPATH\": self.settings.get(\"path\", None)}, finish=True) self.cmd_line = [\"java\",", "def _read(self, last_pass=False): \"\"\" Generator method that returns next portion", "line.endswith(\"\\n\"): self.partial_buffer += line return None, None line = \"%s%s\"", "return True except CALL_PROBLEMS as exc: self.log.warning(\"%s check failed: %s\",", "[] if self.page_source is not None: self.log.debug('Parsing mirrors...') base_link =", "isinstance(req, HTTPRequest): msg = \"Grinder script generator doesn't support '%s'", "local_props = scenario.get(\"properties\") if local_props: fds.write(\"# Scenario Properies Start\\n\") for", "self).__init__() self.report_by_url = False self.log = parent_logger.getChild(self.__class__.__name__) self.file = FileReader(filename=filename,", "fds :return: \"\"\" base_props_file = self.settings.get(\"properties-file\") if base_props_file: fds.write(\"# Base", "TODO: parameterize? 
line = self.lines[lineNo].strip() matched = self.DETAILS_REGEX.match(line) if not", "= False self.log = parent_logger.getChild(self.__class__.__name__) self.file = FileReader(filename=filename, parent_logger=self.log) self.idx", "= self.settings.get(\"properties\") if base_props: fds.write(\"# Base Properies Start\\n\") for key,", "1000)) fds.write(\"# taurus load values in case you need them\\n\")", "self.script = self.__scenario_from_requests() else: msg = \"There must be a", "isinstance(self.engine.aggregator, ConsolidatingAggregator): self.engine.aggregator.add_underling(self.reader) # add logback configurations used by worker", "iteritems(local_props): fds.write(\"%s=%s\\n\" % (key, val)) fds.write(\"# Scenario Properies End\\n\\n\") def", "into %s\", self.tool_name, dest) grinder_dist = self._download(use_link=bool(self.download_link)) self.log.info(\"Unzipping %s\", grinder_dist)", "self.log) self.reader.report_by_url = self.settings.get(\"report-by-url\", False) if isinstance(self.engine.aggregator, ConsolidatingAggregator): self.engine.aggregator.add_underling(self.reader) #", "contents to fds :param fds: fds :return: \"\"\" base_props_file =", "fds.write(\"taurus.steps=%s\\n\" % load.steps) fds.write(\"taurus.hold_for=%s\\n\" % load.hold) fds.write(\"taurus.iterations=%s\\n\" % load.iterations) fds.write(\"#", "self.tool_name) class GrinderMirrorsManager(MirrorsManager): MIRRORS_SOURCE = \"https://sourceforge.net/settings/mirror_choices?projectname=grinder&filename=The%20Grinder\" \\ \"%203/{version}/grinder-{version}-binary.zip&dialog=true\" DOWNLOAD_LINK =", "runner_classdef = self.gen_class_definition(\"TestRunner\", [\"object\"]) sleep_method = self.gen_method_definition(\"rampUpSleeper\", [\"self\"]) sleep_method.append(self.gen_statement(\"if grinder.runNumber", "import TaurusConfigError, ToolError from bzt.engine import ScenarioExecutor, FileLister, HavingInstallableTools, SelfDiagnosable", "in self.test_names: label = self.test_names[test_id] 
else: label = \"Test #%s\"", "parent_logger) self.label = \"BZT Requests\" def build_source_code(self): self.log.debug(\"Generating Python script", "sleep_time: grinder.logger.info('slept for %sms' % sleep_time)\")) sleep_method.append(self.gen_statement(\"else: grinder.logger.info('No sleep needed')\"))", "shutdown(self): \"\"\" If tool is still running - let's stop", "base_props_file: fds.write(\"# Base Properies File Start: %s\\n\" % base_props_file) with", "may not use this file except in compliance with the", "tool usage Copyright 2015 BlazeMeter Inc. Licensed under the Apache", "'.join(line_parts[6:]) self.test_names[test_id] = test_name self.log.debug(\"Recognized test id %s => %s\",", "self.java_helper = None def __write_base_props(self, fds): \"\"\" write base properties", "**kwargs) self.version = self.VERSION self.mirror_manager = GrinderMirrorsManager(self.http_client, self.log, self.version) def", "= self.settings.get(\"report-by-url\", False) if isinstance(self.engine.aggregator, ConsolidatingAggregator): self.engine.aggregator.add_underling(self.reader) # add logback", "int(data_fields[self.idx[\"Time to establish connection\"]]) / 1000.0 bytes_count = int(data_fields[self.idx[\"HTTP response", "% script_props_file) # scenario props local_props = scenario.get(\"properties\") if local_props:", "data_fields: self.log.debug(\"Skipping line: %s\", line.strip()) continue yield self.parse_line(data_fields, worker_id, lnum)", "= li_search_pattern.findall(self.page_source) if li_elements: links = [base_link.format(version=self.grinder_version, mirror=link.strip('<li id=\"').strip('\">')) for", "con_time += int(data_fields[self.idx[\"Time to establish connection\"]]) / 1000.0 bytes_count =", "% int(load.duration * 1000)) fds.write(\"# taurus load values in case", "\"net.grinder.Grinder\", self.properties_file] def startup(self): \"\"\" Should start the tool as", "= self.VERSION self.mirror_manager = GrinderMirrorsManager(self.http_client, self.log, 
self.version) def check_if_installed(self): self.log.debug(\"Trying", "ToolError(\"Gatling tool exited with non-zero code: %s\" % self.retcode, self.get_error_diagnostics())", "if script_file_path: resource_files.append(script_file_path) prop_file = self.get_scenario().get(\"properties-file\") if prop_file: resource_files.append(prop_file) return", "ExecutorWidget from bzt.modules.java import TaurusJavaHelper from bzt.requests_model import HTTPRequest from", "(key, val)) fds.write(\"# Base Properies End\\n\\n\") def __write_scenario_props(self, fds, scenario):", "load.iterations or load.concurrency: fds.write(\"grinder.runs=%s\\n\" % load.iterations or 0) if load.concurrency:", "Generate grinder scenario from requests :return: script \"\"\" script =", "link in li_elements] default_link = self.DOWNLOAD_LINK.format(version=self.grinder_version) if default_link not in", "continue if worker_id == matched.group(1) and r_code == matched.group(3) and", "int(self.scenario.get(\"store-cookie\", True)) self.root.append(self.gen_statement(\"defaults.setUseCookies(%s)\" % cookie_flag, indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_runner_class()) @staticmethod def", "self.VERSION self.mirror_manager = GrinderMirrorsManager(self.http_client, self.log, self.version) def check_if_installed(self): self.log.debug(\"Trying %s:", "val)) fds.write(\"# Base Properies End\\n\\n\") def __write_scenario_props(self, fds, scenario): \"\"\"", "is not None: self.log.debug('Parsing mirrors...') base_link = \"http://sourceforge.net/projects/grinder/files/The%20Grinder%203/{version}/grinder-{version}\" \\ \"-binary.zip/download?use_mirror={mirror}\"", "\".err\"), \"w\") self.install_required_tools() scenario = self.get_scenario() self.exec_id = self.label self.script", "return script def install_required_tools(self): grinder = self._get_tool(Grinder, config=self.settings) self.settings[\"path\"] =", "data_fields[self.idx[\"Thread\"]].strip() if thread_id not in 
self.known_threads: self.known_threads.add(thread_id) self.concurrency += 1", "else: error_msg = None # suppress errors if self.report_by_url: label", "agreed to in writing, software distributed under the License is", "from bzt.utils import MirrorsManager, dehumanize_time, get_full_path, PythonGenerator, CALL_PROBLEMS from bzt.utils", "self.root.append(self.gen_runner_class()) @staticmethod def __list_to_nvpair_list(items): return \"[\" + \",\".join(\"NVPair(%r, %r)\" %", "lnum) if lnum is not None: duration = time.time() -", "id=\"').strip('\">')) for link in li_elements] default_link = self.DOWNLOAD_LINK.format(version=self.grinder_version) if default_link", "self.log.debug(\"Skipping line: %s\", line.strip()) continue yield self.parse_line(data_fields, worker_id, lnum) if", "None, None if len(data_fields) < max(self.idx.values()): return None, None return", "iteritems(base_props): fds.write(\"%s=%s\\n\" % (key, val)) fds.write(\"# Base Properies End\\n\\n\") def", "None self.process = None self.end_time = None self.retcode = None", "error_msg: if r_code != '0': error_msg = \"HTTP %s\" %", "startup(self): \"\"\" Should start the tool as fast as possible.", "None: duration = time.time() - start if duration < 0.001:", "def parse_line(self, data_fields, worker_id, lnum): worker_id = worker_id.split('.')[1] t_stamp =", "line = line.strip() if not line.startswith('data.'): line_parts = line.split(' ')", "= line.split(' ') if len(line_parts) > 1: if line_parts[1] ==", "self.log = parent_logger.getChild(self.__class__.__name__) self.file = FileReader(filename=filename, parent_logger=self.log) self.idx = {}", "self.page_source is not None: self.log.debug('Parsing mirrors...') base_link = \"http://sourceforge.net/projects/grinder/files/The%20Grinder%203/{version}/grinder-{version}\" \\", "= scenario.get(\"properties\") if local_props: fds.write(\"# Scenario Properies Start\\n\") for key,", "* grinder.threadNumber * inc)\")) 
sleep_method.append(self.gen_statement(\"grinder.sleep(sleep_time, 0)\")) sleep_method.append(self.gen_statement(\"if sleep_time: grinder.logger.info('slept for", "host\"]]) / 1000.0 con_time += int(data_fields[self.idx[\"Time to establish connection\"]]) /", "indent=0)) self.root.append(self.add_imports()) self.root.append(self.gen_new_line()) default_address = self.scenario.get(\"default-address\") url_arg = \"url=%r\" %", "re.compile(r\"worker\\.(\\S+) (.+) -> (\\S+) (.+), (\\d+) bytes\") def __init__(self, filename,", "worker_id == matched.group(1) and r_code == matched.group(3) and str(bytes_count) ==", "WidgetProvider, ExecutorWidget from bzt.modules.java import TaurusJavaHelper from bzt.requests_model import HTTPRequest", "self.execution.get('scenario') raise TaurusConfigError(msg) self.properties_file = self.engine.create_artifact(\"grinder\", \".properties\") with open(self.properties_file, 'w')", "default_address if default_address else \"\" self.root.append(self.gen_statement('request = HTTPRequest(%s)' % url_arg,", "self.label builder.build_source_code() builder.save(script) return script def install_required_tools(self): grinder = self._get_tool(Grinder,", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "pass elif line_parts[1] == 'finished': if self.concurrency > 0: self.concurrency", "in scenario: self.script = self.__scenario_from_requests() else: msg = \"There must", "else: msg = \"There must be a script file or", "HTTPRequest(%s)' % url_arg, indent=0)) self.root.append(self.gen_statement('test = Test(1, \"%s\")' % self.label,", "in iteritems(local_props): fds.write(\"%s=%s\\n\" % (key, val)) fds.write(\"# Scenario Properies End\\n\\n\")", "throws exception otherwise. 
:return: bool :raise TaurusToolError: \"\"\" self.retcode =", "% default_address if default_address else \"\" self.root.append(self.gen_statement('request = HTTPRequest(%s)' %", "Properies File Start: %s\\n\" % script_props_file) with open(script_props_file) as spf:", "self.widget: if self.script is not None: label = \"Grinder: %s\"", "-= 1 elif set(line_parts[1:5]) == {'Test', 'name', 'for', 'ID'}: test_id", "self.partial_buffer += line return None, None line = \"%s%s\" %", "= self.settings.get(\"properties-file\") if base_props_file: fds.write(\"# Base Properies File Start: %s\\n\"", "self.gen_class_definition(\"TestRunner\", [\"object\"]) sleep_method = self.gen_method_definition(\"rampUpSleeper\", [\"self\"]) sleep_method.append(self.gen_statement(\"if grinder.runNumber != 0:", "self.check_if_installed(): raise ToolError(\"Unable to run %s after installation!\" % self.tool_name)", "return links class GrinderScriptBuilder(PythonGenerator): IMPORTS = \"\"\" from net.grinder.script import", "None: self.log.debug('Parsing mirrors...') base_link = \"http://sourceforge.net/projects/grinder/files/The%20Grinder%203/{version}/grinder-{version}\" \\ \"-binary.zip/download?use_mirror={mirror}\" li_search_pattern =", "% url_arg, indent=0)) self.root.append(self.gen_statement('test = Test(1, \"%s\")' % self.label, indent=0))", "= self.get_load() if load.iterations or load.concurrency: fds.write(\"grinder.runs=%s\\n\" % load.iterations or", "self.root.append(self.gen_statement(\"defaults.setUseCookies(%s)\" % cookie_flag, indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_runner_class()) @staticmethod def __list_to_nvpair_list(items): return", "\"\"\" Should start the tool as fast as possible. 
\"\"\"", "not self.check_if_installed(): raise ToolError(\"Unable to run %s after installation!\" %", "= line[line.find(' '):] header_list = line.strip().split(self.DELIMITER) for _ix, field in", "duration) def parse_line(self, data_fields, worker_id, lnum): worker_id = worker_id.split('.')[1] t_stamp", "\"url=%r\" % default_address if default_address else \"\" self.root.append(self.gen_statement('request = HTTPRequest(%s)'", "continue line = line[line.find(' '):] header_list = line.strip().split(self.DELIMITER) for _ix,", "default_link = self.DOWNLOAD_LINK.format(version=self.grinder_version) if default_link not in links: links.append(default_link) self.log.debug('Total", "JavaVM, shutdown_process, TclLibrary, FileReader, RESOURCES_DIR class GrinderExecutor(ScenarioExecutor, WidgetProvider, FileLister, HavingInstallableTools,", "source_id = '' # maybe use worker_id somehow? return t_stamp,", "= \",\" DETAILS_REGEX = re.compile(r\"worker\\.(\\S+) (.+) -> (\\S+) (.+), (\\d+)", "SelfDiagnosable): \"\"\" Grinder executor module \"\"\" def __init__(self): super(GrinderExecutor, self).__init__()", "WidgetProvider, FileLister, HavingInstallableTools, SelfDiagnosable): \"\"\" Grinder executor module \"\"\" def", "prop_file = self.get_scenario().get(\"properties-file\") if prop_file: resource_files.append(prop_file) return resource_files def get_error_diagnostics(self):", "- 100, 0), lnum)): # looking max 100 lines back.", "return \"[\" + \",\".join(\"NVPair(%r, %r)\" % (header, value) for header,", "return None, None if len(data_fields) < max(self.idx.values()): return None, None", "compliance with the License. 
You may obtain a copy of", "file or requests for its generation \" msg += \"to", "script \"\"\" script = self.engine.create_artifact(\"grinder_requests\", \".py\") builder = GrinderScriptBuilder(self.get_scenario(), self.log)", "indent=0)) self.root.append(self.gen_statement('test.record(request)', indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_statement(\"defaults = HTTPPluginControl.getConnectionDefaults()\", indent=0)) self.root.append(self.gen_statement(\"utilities =", "self.root.append(self.gen_statement('test.record(request)', indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_statement(\"defaults = HTTPPluginControl.getConnectionDefaults()\", indent=0)) self.root.append(self.gen_statement(\"utilities = HTTPPluginControl.getHTTPUtilities()\",", "elif test_id in self.test_names: label = self.test_names[test_id] else: label =", "continue yield self.parse_line(data_fields, worker_id, lnum) if lnum is not None:", "bzt.modules.aggregator import ConsolidatingAggregator, ResultsReader from bzt.modules.console import WidgetProvider, ExecutorWidget from", "finish=True) self.env.add_path({\"CLASSPATH\": self.java_helper.tool_path}, finish=True) self.env.add_path({\"CLASSPATH\": self.settings.get(\"path\", None)}, finish=True) self.cmd_line =", "self.version) def check_if_installed(self): self.log.debug(\"Trying %s: %s\", self.tool_name, self.tool_path) try: out,", "self.get_load().ramp_up: self.widget.duration += self.get_load().ramp_up # because we have ramp-down equal", "def build_source_code(self): self.log.debug(\"Generating Python script for Grinder\") self.root.append(self.gen_comment(\"This script was", "Write bzt properties to fds :param fds: :return: \"\"\" fds.write(\"#", "return False def shutdown(self): \"\"\" If tool is still running", "self.script = self.get_script_path() if not self.script: if \"requests\" in scenario:", "data_fields[self.idx[\"HTTP response code\"]].strip() con_time = 
int(data_fields[self.idx[\"Time to resolve host\"]]) /", "time.time() for lnum, line in enumerate(self.lines): if not self.idx: if", "base_props = self.settings.get(\"properties\") if base_props: fds.write(\"# Base Properies Start\\n\") for", "= \"url=%r\" % default_address if default_address else \"\" self.root.append(self.gen_statement('request =", "%s)\" % (method, url, params, headers))) think_time = dehumanize_time(req.priority_option('think-time')) if", "for its generation \" msg += \"to run Grinder tool", "grinder_version base_link = self.MIRRORS_SOURCE.format(version=self.grinder_version) super(GrinderMirrorsManager, self).__init__(http_client, base_link, parent_logger) def _parse_mirrors(self):", "= \"[]\" headers = self.__list_to_nvpair_list(iteritems(local_headers)) main_method.append(self.gen_statement(\"request.%s(%r, %s, %s)\" % (method,", "= int(data_fields[self.idx[\"Time to resolve host\"]]) / 1000.0 con_time += int(data_fields[self.idx[\"Time", "\\ \"/grinder-{version}-binary.zip?r=&ts=\" + str(int(time.time())) + \"&use_mirror=autoselect\" def __init__(self, http_client, parent_logger,", "if len(line_parts) > 1: if line_parts[1] == 'starting,': # self.concurrency", "reading speed: %s lines/s\", (lnum + 1) / duration) def", "\"\"\" fds.write(\"# BZT Properies Start\\n\") fds.write(\"grinder.hostID=%s\\n\" % self.exec_id) fds.write(\"grinder.script=%s\\n\" %", "int(data_fields[self.idx['HTTP response errors']]): if not error_msg: if r_code != '0':", "self.MIRRORS_SOURCE.format(version=self.grinder_version) super(GrinderMirrorsManager, self).__init__(http_client, base_link, parent_logger) def _parse_mirrors(self): links = []", "after installation!\" % self.tool_name) class GrinderMirrorsManager(MirrorsManager): MIRRORS_SOURCE = \"https://sourceforge.net/settings/mirror_choices?projectname=grinder&filename=The%20Grinder\" \\", "if isinstance(self.engine.aggregator, ConsolidatingAggregator): self.engine.aggregator.add_underling(self.reader) # add 
logback configurations used by", "GrinderExecutor(ScenarioExecutor, WidgetProvider, FileLister, HavingInstallableTools, SelfDiagnosable): \"\"\" Grinder executor module \"\"\"", "End: %s\\n\\n\" % base_props_file) # base props base_props = self.settings.get(\"properties\")", "def __init__(self, filename, parent_logger): super(DataLogReader, self).__init__() self.report_by_url = False self.log", "\"\"\" self.env.set({\"T_GRINDER_PREFIX\": self.exec_id}) self.process = self.execute(self.cmd_line) def check(self): \"\"\" Checks", "grinder.threadNumber * inc)\")) sleep_method.append(self.gen_statement(\"grinder.sleep(sleep_time, 0)\")) sleep_method.append(self.gen_statement(\"if sleep_time: grinder.logger.info('slept for %sms'", "data_fields[self.idx[\"Test\"]].strip() thread_id = worker_id + '/' + data_fields[self.idx[\"Thread\"]].strip() if thread_id", "script_props_file) with open(script_props_file) as spf: fds.write(spf.read()) fds.write(\"# Script Properies File", "def check(self): \"\"\" Checks if tool is still running. 
Also", "TaurusToolError: \"\"\" self.retcode = self.process.poll() if self.retcode is not None:", "self.concurrency += 1 url, error_msg = self.__parse_prev_lines(worker_id, lnum, r_code, bytes_count)", "parent_logger) def _parse_mirrors(self): links = [] if self.page_source is not", "out, err = self.call([\"java\", \"-classpath\", self.tool_path, \"net.grinder.Grinder\"]) if err: out", "li_elements] default_link = self.DOWNLOAD_LINK.format(version=self.grinder_version) if default_link not in links: links.append(default_link)", "VERSION = \"3.11\" LOCAL_PATH = \"~/.bzt/grinder-taurus/lib/grinder.jar\" def __init__(self, config=None, **kwargs):", "def __init__(self, config=None, **kwargs): settings = config or {} grinder_path", "file contents to fds :param fds: fds :return: \"\"\" base_props_file", "grinder = self._get_tool(Grinder, config=self.settings) self.settings[\"path\"] = grinder.tool_path self.java_helper = self._get_tool(TaurusJavaHelper)", "== 'finished': if self.concurrency > 0: self.concurrency -= 1 elif", "for link in li_elements] default_link = self.DOWNLOAD_LINK.format(version=self.grinder_version) if default_link not", "load.concurrency: fds.write(\"grinder.runs=%s\\n\" % load.iterations or 0) if load.concurrency: fds.write(\"grinder.threads=%s\\n\" %", "self).post_process() def __scenario_from_requests(self): \"\"\" Generate grinder scenario from requests :return:", "from bzt import TaurusConfigError, ToolError from bzt.engine import ScenarioExecutor, FileLister,", "% self.retcode, self.get_error_diagnostics()) return True return False def shutdown(self): \"\"\"", "self.lines[lineNo].strip() matched = self.DETAILS_REGEX.match(line) if not matched: continue if worker_id", "diagnostics.append(\"Grinder STDOUT:\\n\" + contents) if self.stderr is not None: with", "\"\"\" base_props_file = self.settings.get(\"properties-file\") if base_props_file: fds.write(\"# Base Properies File", "os import re import time from bzt import TaurusConfigError, ToolError", 
"- start if duration < 0.001: duration = 0.001 self.log.debug(\"Log", "con_time, latency, r_code, error_msg, source_id, bytes_count def __split(self, line): if", "fds.write(\"# Script Properies File Start: %s\\n\" % script_props_file) with open(script_props_file)", "parent_logger=self.log) self.idx = {} self.partial_buffer = \"\" self.start_time = 0", "# suppress errors if self.report_by_url: label = url elif test_id", "self.gen_method_definition(\"rampUpSleeper\", [\"self\"]) sleep_method.append(self.gen_statement(\"if grinder.runNumber != 0: return\")) sleep_method.append(self.gen_statement(\"tprops = grinder.properties.getPropertySubset('taurus.')\"))", "diagnostics.append(\"Grinder STDOUT:\\n\" + contents) return diagnostics class DataLogReader(ResultsReader): \"\"\" Class", "= self._download(use_link=bool(self.download_link)) self.log.info(\"Unzipping %s\", grinder_dist) unzip(grinder_dist, dest, 'grinder-' + self.version)", "Checks if tool is still running. Also checks if resulting", "len(data_fields) < max(self.idx.values()): return None, None return data_fields, worker_id def", "!= 0: raise ToolError(\"Gatling tool exited with non-zero code: %s\"", "ScenarioExecutor, FileLister, HavingInstallableTools, SelfDiagnosable from bzt.modules.aggregator import ConsolidatingAggregator, ResultsReader from", "stdout: %s\", self.tool_name, out) return True except CALL_PROBLEMS as exc:", "builder.label = self.label builder.build_source_code() builder.save(script) return script def install_required_tools(self): grinder", "int(int(data_fields[self.idx[\"Start time (ms since Epoch)\"]]) / 1000.0) r_time = int(data_fields[self.idx[\"Test", "install_required_tools(self): grinder = self._get_tool(Grinder, config=self.settings) self.settings[\"path\"] = grinder.tool_path self.java_helper =", "\"HTTP %s\" % r_code else: error_msg = \"Java exception calling", "download_link=download_link, **kwargs) self.version = self.VERSION self.mirror_manager = 
GrinderMirrorsManager(self.http_client, self.log, self.version)", "\"\"\" Write bzt properties to fds :param fds: :return: \"\"\"", "= get_full_path(self.tool_path, step_up=2) self.log.info(\"Will install %s into %s\", self.tool_name, dest)", "= self.gen_class_definition(\"TestRunner\", [\"object\"]) sleep_method = self.gen_method_definition(\"rampUpSleeper\", [\"self\"]) sleep_method.append(self.gen_statement(\"if grinder.runNumber !=", "bzt.utils import MirrorsManager, dehumanize_time, get_full_path, PythonGenerator, CALL_PROBLEMS from bzt.utils import", "= self.__scenario_from_requests() else: msg = \"There must be a script", "% (key, val)) fds.write(\"# Base Properies End\\n\\n\") def __write_scenario_props(self, fds,", "__list_to_nvpair_list(items): return \"[\" + \",\".join(\"NVPair(%r, %r)\" % (header, value) for", "r_code = data_fields[self.idx[\"HTTP response code\"]].strip() con_time = int(data_fields[self.idx[\"Time to resolve", "= line[line.find(' '):] data_fields = line.split(self.DELIMITER) if not data_fields[1].strip().isdigit(): return", "net.grinder.script.Grinder import grinder from net.grinder.plugin.http import HTTPRequest, HTTPPluginControl, HTTPUtilities from", "self.reader.report_by_url = self.settings.get(\"report-by-url\", False) if isinstance(self.engine.aggregator, ConsolidatingAggregator): self.engine.aggregator.add_underling(self.reader) # add", "in items) + \"]\" def gen_runner_class(self): runner_classdef = self.gen_class_definition(\"TestRunner\", [\"object\"])", "str(bytes_count) == matched.group(5): return matched.group(2), matched.group(4) return url, error_msg class", "import ConsolidatingAggregator, ResultsReader from bzt.modules.console import WidgetProvider, ExecutorWidget from bzt.modules.java", "if resulting logs contains any data and throws exception otherwise.", "r_time = int(data_fields[self.idx[\"Test time\"]]) / 1000.0 latency = int(data_fields[self.idx[\"Time to", "% self.tool_name) class 
GrinderMirrorsManager(MirrorsManager): MIRRORS_SOURCE = \"https://sourceforge.net/settings/mirror_choices?projectname=grinder&filename=The%20Grinder\" \\ \"%203/{version}/grinder-{version}-binary.zip&dialog=true\" DOWNLOAD_LINK", "\"\"\" script = self.engine.create_artifact(\"grinder_requests\", \".py\") builder = GrinderScriptBuilder(self.get_scenario(), self.log) builder.label", "Start\\n\") for key, val in iteritems(base_props): fds.write(\"%s=%s\\n\" % (key, val))", "%s: %s\", self.tool_name, self.tool_path) try: out, err = self.call([\"java\", \"-classpath\",", "super(GrinderMirrorsManager, self).__init__(http_client, base_link, parent_logger) def _parse_mirrors(self): links = [] if", "settings = config or {} grinder_path = settings.get(\"path\", self.LOCAL_PATH) grinder_path", "out) return True except CALL_PROBLEMS as exc: self.log.warning(\"%s check failed:", "super(GrinderScriptBuilder, self).__init__(scenario, parent_logger) self.label = \"BZT Requests\" def build_source_code(self): self.log.debug(\"Generating", "HTTPPluginControl.getConnectionDefaults()\", indent=0)) self.root.append(self.gen_statement(\"utilities = HTTPPluginControl.getHTTPUtilities()\", indent=0)) headers = self.scenario.get_headers() if", "settings.get(\"path\", self.LOCAL_PATH) grinder_path = get_full_path(grinder_path) download_link = settings.get(\"download-link\", \"\") super(Grinder,", "line[line.find(' '):] header_list = line.strip().split(self.DELIMITER) for _ix, field in enumerate(header_list):", "# looking max 100 lines back. TODO: parameterize? 
line =", "% sleep_time)\")) sleep_method.append(self.gen_statement(\"else: grinder.logger.info('No sleep needed')\")) sleep_method.append(self.gen_new_line()) runner_classdef.append(sleep_method) main_method =", "None: label = \"Grinder: %s\" % os.path.basename(self.script) else: label =", "connection\"]]) / 1000.0 bytes_count = int(data_fields[self.idx[\"HTTP response length\"]].strip()) test_id =", "super(Grinder, self).__init__(tool_path=grinder_path, download_link=download_link, **kwargs) self.version = self.VERSION self.mirror_manager = GrinderMirrorsManager(self.http_client,", "re.compile(r'<li id=\".*?\">') li_elements = li_search_pattern.findall(self.page_source) if li_elements: links = [base_link.format(version=self.grinder_version,", "= self.__parse_prev_lines(worker_id, lnum, r_code, bytes_count) if int(data_fields[self.idx[\"Errors\"]]) or int(data_fields[self.idx['HTTP response", "= ExecutorWidget(self, label) if self.get_load().ramp_up: self.widget.duration += self.get_load().ramp_up # because", "with open(self.properties_file, 'w') as fds: self.__write_base_props(fds) self.__write_scenario_props(fds, scenario) self.__write_bzt_props(fds) self.kpi_file", "for key, val in iteritems(base_props): fds.write(\"%s=%s\\n\" % (key, val)) fds.write(\"#", "License, Version 2.0 (the \"License\"); you may not use this", "% (method, url, params, headers))) think_time = dehumanize_time(req.priority_option('think-time')) if think_time:", "set(line_parts[1:5]) == {'Test', 'name', 'for', 'ID'}: test_id = line_parts[5][:-1] test_name", "calling TestRunner\" else: error_msg = None # suppress errors if", "headers))) think_time = dehumanize_time(req.priority_option('think-time')) if think_time: main_method.append(self.gen_statement(\"grinder.sleep(%s)\" % int(think_time *", "if not error_msg: if r_code != '0': error_msg = \"HTTP", "Base Properies File Start: %s\\n\" % base_props_file) with open(base_props_file) as", "= self.label self.script = self.get_script_path() if 
not self.script: if \"requests\"", "self).__init__(http_client, base_link, parent_logger) def _parse_mirrors(self): links = [] if self.page_source", "self.concurrency += 1 pass elif line_parts[1] == 'finished': if self.concurrency", "'name', 'for', 'ID'}: test_id = line_parts[5][:-1] test_name = ' '.join(line_parts[6:])", "fast as possible. \"\"\" self.env.set({\"T_GRINDER_PREFIX\": self.exec_id}) self.process = self.execute(self.cmd_line) def", "0 self.end_time = 0 self.concurrency = 0 self.test_names = {}", "\"\"\" Write scenario props and scenario file props to fds", "line: %s\", line.strip()) continue yield self.parse_line(data_fields, worker_id, lnum) if lnum", "If tool is still running - let's stop it. \"\"\"", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "matched.group(1) and r_code == matched.group(3) and str(bytes_count) == matched.group(5): return", "= [] script_file_path = self.get_script_path() if script_file_path: resource_files.append(script_file_path) prop_file =", "self.cmd_line = [\"java\", \"net.grinder.Grinder\", self.properties_file] def startup(self): \"\"\" Should start", "Module holds all stuff regarding Grinder tool usage Copyright 2015", "name records continue line = line[line.find(' '):] header_list = line.strip().split(self.DELIMITER)", "not data_fields[1].strip().isdigit(): return None, None if len(data_fields) < max(self.idx.values()): return", "[] if self.stdout is not None: with open(self.stdout.name) as fds:", "File End: %s\\n\\n\" % base_props_file) # base props base_props =", "fds.write(\"# BZT Properies Start\\n\") fds.write(\"grinder.hostID=%s\\n\" % self.exec_id) fds.write(\"grinder.script=%s\\n\" % self.script.replace(os.path.sep,", "'grinder-' + self.version) os.remove(grinder_dist) self.log.info(\"Installed grinder successfully\") if not self.check_if_installed():", "indent=0)) cookie_flag = int(self.scenario.get(\"store-cookie\", True)) self.root.append(self.gen_statement(\"defaults.setUseCookies(%s)\" % 
cookie_flag, indent=0)) self.root.append(self.gen_new_line())", "if err: out += err self.log.debug(\"%s stdout: %s\", self.tool_name, out)", "script_props_file: fds.write(\"# Script Properies File Start: %s\\n\" % script_props_file) with", "None for lineNo in reversed(range(max(lnum - 100, 0), lnum)): #", "* 1000), indent=0)) cookie_flag = int(self.scenario.get(\"store-cookie\", True)) self.root.append(self.gen_statement(\"defaults.setUseCookies(%s)\" % cookie_flag,", "regarding Grinder tool usage Copyright 2015 BlazeMeter Inc. Licensed under", "generation \" msg += \"to run Grinder tool (%s)\" %", "value) for header, value in items) + \"]\" def gen_runner_class(self):", "self.concurrency = 0 self.test_names = {} self.known_threads = set() def", "with open(base_props_file) as bpf: fds.write(bpf.read()) fds.write(\"# Base Properies File End:", "of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "logs contains any data and throws exception otherwise. :return: bool", "_parse_mirrors(self): links = [] if self.page_source is not None: self.log.debug('Parsing", "script_props_file = scenario.get(\"properties-file\") if script_props_file: fds.write(\"# Script Properies File Start:", "Grinder tool usage Copyright 2015 BlazeMeter Inc. 
Licensed under the", "from bzt.modules.console import WidgetProvider, ExecutorWidget from bzt.modules.java import TaurusJavaHelper from", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "sleep_time)\")) sleep_method.append(self.gen_statement(\"else: grinder.logger.info('No sleep needed')\")) sleep_method.append(self.gen_new_line()) runner_classdef.append(sleep_method) main_method = self.gen_method_definition(\"__call__\",", "value in items) + \"]\" def gen_runner_class(self): runner_classdef = self.gen_class_definition(\"TestRunner\",", "% (self.partial_buffer, line) self.partial_buffer = \"\" line = line.strip() if", "_read(self, last_pass=False): \"\"\" Generator method that returns next portion of", "r_code, error_msg, source_id, bytes_count def __split(self, line): if not line.endswith(\"\\n\"):", "Class to read KPI from data log \"\"\" DELIMITER =", "self.test_names[test_id] = test_name self.log.debug(\"Recognized test id %s => %s\", test_id,", "may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless", "elif set(line_parts[1:5]) == {'Test', 'name', 'for', 'ID'}: test_id = line_parts[5][:-1]", "parameterize? line = self.lines[lineNo].strip() matched = self.DETAILS_REGEX.match(line) if not matched:", "mirror=link.strip('<li id=\"').strip('\">')) for link in li_elements] default_link = self.DOWNLOAD_LINK.format(version=self.grinder_version) if", "if default_link not in links: links.append(default_link) self.log.debug('Total mirrors: %d', len(links))", "= HTTPPluginControl.getHTTPUtilities()\", indent=0)) headers = self.scenario.get_headers() if not self.scenario.get(\"keepalive\", True):", "= 'close' if headers: self.root.append(self.gen_statement(\"defaults.setDefaultHeaders([\", indent=0)) for header, value in", "under the License is distributed on an \"AS IS\" BASIS,", "100 lines back. TODO: parameterize? 
line = self.lines[lineNo].strip() matched =", "if self.start_time: self.end_time = time.time() self.log.debug(\"Grinder worked for %s seconds\",", "portion of data :param last_pass: \"\"\" self.log.debug(\"Reading grinder results...\") self.lines", "self.stderr = open(self.engine.create_artifact(\"grinder\", \".err\"), \"w\") self.install_required_tools() scenario = self.get_scenario() self.exec_id", "if not data_fields: self.log.debug(\"Skipping line: %s\", line.strip()) continue yield self.parse_line(data_fields,", "from requests :return: script \"\"\" script = self.engine.create_artifact(\"grinder_requests\", \".py\") builder", "None, None return data_fields, worker_id def __parse_prev_lines(self, worker_id, lnum, r_code,", "yield self.parse_line(data_fields, worker_id, lnum) if lnum is not None: duration", "import HTTPRequest, HTTPPluginControl, HTTPUtilities from HTTPClient import NVPair \"\"\" def", "def __init__(self, scenario, parent_logger): super(GrinderScriptBuilder, self).__init__(scenario, parent_logger) self.label = \"BZT", "data_fields, worker_id def __parse_prev_lines(self, worker_id, lnum, r_code, bytes_count): url =", "this file except in compliance with the License. 
You may", "requests :return: script \"\"\" script = self.engine.create_artifact(\"grinder_requests\", \".py\") builder =", "= self._get_tool(Grinder, config=self.settings) self.settings[\"path\"] = grinder.tool_path self.java_helper = self._get_tool(TaurusJavaHelper) required_tools", "= tprops.getDouble('ramp_up', 0)/tprops.getInt('concurrency', 1)\")) sleep_method.append(self.gen_statement(\"sleep_time = int(1000 * grinder.threadNumber *", "= int(data_fields[self.idx[\"Test time\"]]) / 1000.0 latency = int(data_fields[self.idx[\"Time to first", "label = None self.widget = ExecutorWidget(self, label) if self.get_load().ramp_up: self.widget.duration", "self.known_threads.add(thread_id) self.concurrency += 1 url, error_msg = self.__parse_prev_lines(worker_id, lnum, r_code,", "\"/grinder-{version}-binary.zip?r=&ts=\" + str(int(time.time())) + \"&use_mirror=autoselect\" def __init__(self, http_client, parent_logger, grinder_version):", "returns next portion of data :param last_pass: \"\"\" self.log.debug(\"Reading grinder", "= [base_link.format(version=self.grinder_version, mirror=link.strip('<li id=\"').strip('\">')) for link in li_elements] default_link =", "RequiredTool, JavaVM, shutdown_process, TclLibrary, FileReader, RESOURCES_DIR class GrinderExecutor(ScenarioExecutor, WidgetProvider, FileLister,", "fds.write(\"# Base Properies Start\\n\") for key, val in iteritems(base_props): fds.write(\"%s=%s\\n\"", "= None self.java_helper = None def __write_base_props(self, fds): \"\"\" write", "1 pass elif line_parts[1] == 'finished': if self.concurrency > 0:", "indent=0)) self.root.append(self.gen_statement('test = Test(1, \"%s\")' % self.label, indent=0)) self.root.append(self.gen_statement('test.record(request)', indent=0))", "self.widget def resource_files(self): resource_files = [] script_file_path = self.get_script_path() if", "def _parse_mirrors(self): links = [] if self.page_source is not None:", "resource_files.append(script_file_path) prop_file = 
self.get_scenario().get(\"properties-file\") if prop_file: resource_files.append(prop_file) return resource_files def", "self.retcode = self.process.poll() if self.retcode is not None: if self.retcode", "self.settings.get(\"report-by-url\", False) if isinstance(self.engine.aggregator, ConsolidatingAggregator): self.engine.aggregator.add_underling(self.reader) # add logback configurations", ":param last_pass: \"\"\" self.log.debug(\"Reading grinder results...\") self.lines = list(self.file.get_lines(size=1024 *", "'close' if headers: self.root.append(self.gen_statement(\"defaults.setDefaultHeaders([\", indent=0)) for header, value in iteritems(headers):", "= \"HTTP %s\" % r_code else: error_msg = \"Java exception", "set() def _read(self, last_pass=False): \"\"\" Generator method that returns next", "/ 1000.0 con_time += int(data_fields[self.idx[\"Time to establish connection\"]]) / 1000.0", "file except in compliance with the License. You may obtain", "= \"https://downloads.sourceforge.net/project/grinder/The%20Grinder%203/{version}\" \\ \"/grinder-{version}-binary.zip?r=&ts=\" + str(int(time.time())) + \"&use_mirror=autoselect\" def __init__(self,", "code\"]].strip() con_time = int(data_fields[self.idx[\"Time to resolve host\"]]) / 1000.0 con_time", "we have ramp-down equal to rampup return self.widget def resource_files(self):", "OR CONDITIONS OF ANY KIND, either express or implied. See", "error_msg = None # suppress errors if self.report_by_url: label =", "and throws exception otherwise. 
:return: bool :raise TaurusToolError: \"\"\" self.retcode", "return\")) sleep_method.append(self.gen_statement(\"tprops = grinder.properties.getPropertySubset('taurus.')\")) sleep_method.append(self.gen_statement(\"inc = tprops.getDouble('ramp_up', 0)/tprops.getInt('concurrency', 1)\")) sleep_method.append(self.gen_statement(\"sleep_time", "not tool.check_if_installed(): tool.install() def get_widget(self): if not self.widget: if self.script", "base_link = self.MIRRORS_SOURCE.format(version=self.grinder_version) super(GrinderMirrorsManager, self).__init__(http_client, base_link, parent_logger) def _parse_mirrors(self): links", "in case you need them\\n\") fds.write(\"taurus.concurrency=%s\\n\" % load.concurrency) fds.write(\"taurus.throughput=%s\\n\" %", "None: with open(self.stderr.name) as fds: contents = fds.read().strip() if contents.strip():", "fds.write(\"grinder.threads=%s\\n\" % load.concurrency) if load.duration: fds.write(\"grinder.duration=%s\\n\" % int(load.duration * 1000))", "FileReader, RESOURCES_DIR class GrinderExecutor(ScenarioExecutor, WidgetProvider, FileLister, HavingInstallableTools, SelfDiagnosable): \"\"\" Grinder", "or int(data_fields[self.idx['HTTP response errors']]): if not error_msg: if r_code !=", "load.duration: fds.write(\"grinder.duration=%s\\n\" % int(load.duration * 1000)) fds.write(\"# taurus load values", "+= err self.log.debug(\"%s stdout: %s\", self.tool_name, out) return True except", "self.script.replace(os.path.sep, \"/\")) fds.write(\"grinder.logDirectory=%s\\n\" % self.engine.artifacts_dir.replace(os.path.sep, \"/\")) load = self.get_load() if", "holds all stuff regarding Grinder tool usage Copyright 2015 BlazeMeter", "cookie_flag = int(self.scenario.get(\"store-cookie\", True)) self.root.append(self.gen_statement(\"defaults.setUseCookies(%s)\" % cookie_flag, indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_runner_class())", "test id %s => %s\", test_id, test_name) return None, None", "# add 
logback configurations used by worker processes (logback-worker.xml) self.env.add_path({\"CLASSPATH\":", "return url, error_msg class Grinder(RequiredTool): # todo: take it from", "lnum = None start = time.time() for lnum, line in", "in self.known_threads: self.known_threads.add(thread_id) self.concurrency += 1 url, error_msg = self.__parse_prev_lines(worker_id,", "net.grinder.plugin.http import HTTPRequest, HTTPPluginControl, HTTPUtilities from HTTPClient import NVPair \"\"\"", "bpf: fds.write(bpf.read()) fds.write(\"# Base Properies File End: %s\\n\\n\" % base_props_file)", "if load.concurrency: fds.write(\"grinder.threads=%s\\n\" % load.concurrency) if load.duration: fds.write(\"grinder.duration=%s\\n\" % int(load.duration", "self.get_scenario() self.exec_id = self.label self.script = self.get_script_path() if not self.script:", "= int(int(data_fields[self.idx[\"Start time (ms since Epoch)\"]]) / 1000.0) r_time =", "headers['Connection'] = 'close' if headers: self.root.append(self.gen_statement(\"defaults.setDefaultHeaders([\", indent=0)) for header, value", "script was generated by Taurus\", indent=0)) self.root.append(self.add_imports()) self.root.append(self.gen_new_line()) default_address =", "fds: :param scenario: dict :return: \"\"\" script_props_file = scenario.get(\"properties-file\") if", "data_fields, worker_id, lnum): worker_id = worker_id.split('.')[1] t_stamp = int(int(data_fields[self.idx[\"Start time", "== matched.group(1) and r_code == matched.group(3) and str(bytes_count) == matched.group(5):", "HTTPClient import NVPair \"\"\" def __init__(self, scenario, parent_logger): super(GrinderScriptBuilder, self).__init__(scenario,", "worker_id, lnum): worker_id = worker_id.split('.')[1] t_stamp = int(int(data_fields[self.idx[\"Start time (ms", "get_widget(self): if not self.widget: if self.script is not None: label", "sleep_method.append(self.gen_new_line()) runner_classdef.append(sleep_method) main_method = self.gen_method_definition(\"__call__\", 
[\"self\"]) main_method.append(self.gen_statement(\"self.rampUpSleeper()\")) for req in", "FileLister, HavingInstallableTools, SelfDiagnosable): \"\"\" Grinder executor module \"\"\" def __init__(self):", "= None def __write_base_props(self, fds): \"\"\" write base properties and", "dict :return: \"\"\" script_props_file = scenario.get(\"properties-file\") if script_props_file: fds.write(\"# Script", "Grinder\") self.root.append(self.gen_comment(\"This script was generated by Taurus\", indent=0)) self.root.append(self.add_imports()) self.root.append(self.gen_new_line())", "re import time from bzt import TaurusConfigError, ToolError from bzt.engine", "sleep_method.append(self.gen_statement(\"tprops = grinder.properties.getPropertySubset('taurus.')\")) sleep_method.append(self.gen_statement(\"inc = tprops.getDouble('ramp_up', 0)/tprops.getInt('concurrency', 1)\")) sleep_method.append(self.gen_statement(\"sleep_time =", "if contents.strip(): diagnostics.append(\"Grinder STDOUT:\\n\" + contents) return diagnostics class DataLogReader(ResultsReader):", "= \"Grinder: %s\" % os.path.basename(self.script) else: label = None self.widget", "self.DOWNLOAD_LINK.format(version=self.grinder_version) if default_link not in links: links.append(default_link) self.log.debug('Total mirrors: %d',", "grinder scenario from requests :return: script \"\"\" script = self.engine.create_artifact(\"grinder_requests\",", "script = self.engine.create_artifact(\"grinder_requests\", \".py\") builder = GrinderScriptBuilder(self.get_scenario(), self.log) builder.label =", "field in enumerate(header_list): self.idx[field.strip()] = _ix data_fields, worker_id = self.__split(line)", "worker processes (logback-worker.xml) self.env.add_path({\"CLASSPATH\": RESOURCES_DIR}, finish=True) self.env.add_path({\"CLASSPATH\": self.java_helper.tool_path}, finish=True) self.env.add_path({\"CLASSPATH\":", "def __parse_prev_lines(self, worker_id, lnum, r_code, bytes_count): url = '' error_msg", "% 
os.path.basename(self.script) else: label = None self.widget = ExecutorWidget(self, label)", "url elif test_id in self.test_names: label = self.test_names[test_id] else: label", "self).__init__() self.script = None self.exec_id = \"grinder-bzt-%s\" % id(self) self.properties_file", "if load.iterations or load.concurrency: fds.write(\"grinder.runs=%s\\n\" % load.iterations or 0) if", "2.0 (the \"License\"); you may not use this file except", "= \"grinder-bzt-%s\" % id(self) self.properties_file = None self.kpi_file = None", "!= '0': error_msg = \"HTTP %s\" % r_code else: error_msg", "self.root.append(self.gen_statement('test = Test(1, \"%s\")' % self.label, indent=0)) self.root.append(self.gen_statement('test.record(request)', indent=0)) self.root.append(self.gen_new_line())", "length\"]].strip()) test_id = data_fields[self.idx[\"Test\"]].strip() thread_id = worker_id + '/' +", "a script file or requests for its generation \" msg", "read KPI from data log \"\"\" DELIMITER = \",\" DETAILS_REGEX", "self.log.debug(\"Recognized test id %s => %s\", test_id, test_name) return None,", "CALL_PROBLEMS from bzt.utils import unzip, RequiredTool, JavaVM, shutdown_process, TclLibrary, FileReader,", "use this file except in compliance with the License. 
You", "from bzt.engine import ScenarioExecutor, FileLister, HavingInstallableTools, SelfDiagnosable from bzt.modules.aggregator import", "self.env.add_path({\"CLASSPATH\": self.settings.get(\"path\", None)}, finish=True) self.cmd_line = [\"java\", \"net.grinder.Grinder\", self.properties_file] def", "Epoch)\"]]) / 1000.0) r_time = int(data_fields[self.idx[\"Test time\"]]) / 1000.0 latency", "scenario.get(\"properties-file\") if script_props_file: fds.write(\"# Script Properies File Start: %s\\n\" %", "if self.page_source is not None: self.log.debug('Parsing mirrors...') base_link = \"http://sourceforge.net/projects/grinder/files/The%20Grinder%203/{version}/grinder-{version}\"", "properties file contents to fds :param fds: fds :return: \"\"\"", "self.__list_to_nvpair_list(iteritems(local_headers)) main_method.append(self.gen_statement(\"request.%s(%r, %s, %s)\" % (method, url, params, headers))) think_time", "[] script_file_path = self.get_script_path() if script_file_path: resource_files.append(script_file_path) prop_file = self.get_scenario().get(\"properties-file\")", "lnum)): # looking max 100 lines back. TODO: parameterize? 
line", "self.root.append(self.gen_new_line()) self.root.append(self.gen_runner_class()) @staticmethod def __list_to_nvpair_list(items): return \"[\" + \",\".join(\"NVPair(%r, %r)\"", "line.strip() if not line.startswith('data.'): line_parts = line.split(' ') if len(line_parts)", "or {} grinder_path = settings.get(\"path\", self.LOCAL_PATH) grinder_path = get_full_path(grinder_path) download_link", "self.log, self.version) def check_if_installed(self): self.log.debug(\"Trying %s: %s\", self.tool_name, self.tool_path) try:", "HTTPPluginControl, HTTPUtilities from HTTPClient import NVPair \"\"\" def __init__(self, scenario,", "Base Properies Start\\n\") for key, val in iteritems(base_props): fds.write(\"%s=%s\\n\" %", "req in self.scenario.get_requests(): if not isinstance(req, HTTPRequest): msg = \"Grinder", "os.path.join(self.engine.artifacts_dir, self.exec_id + \"-kpi.log\") self.reader = DataLogReader(self.kpi_file, self.log) self.reader.report_by_url =", "from bzt.modules.aggregator import ConsolidatingAggregator, ResultsReader from bzt.modules.console import WidgetProvider, ExecutorWidget", "to resolve host\"]]) / 1000.0 con_time += int(data_fields[self.idx[\"Time to establish", "else: error_msg = \"Java exception calling TestRunner\" else: error_msg =", "for req in self.scenario.get_requests(): if not isinstance(req, HTTPRequest): msg =", "def __write_bzt_props(self, fds): \"\"\" Write bzt properties to fds :param", "grinder results...\") self.lines = list(self.file.get_lines(size=1024 * 1024, last_pass=last_pass)) lnum =", "(the \"License\"); you may not use this file except in", "(.+) -> (\\S+) (.+), (\\d+) bytes\") def __init__(self, filename, parent_logger):", "[\"java\", \"net.grinder.Grinder\", self.properties_file] def startup(self): \"\"\" Should start the tool", "return None, None worker_id = line[:line.find(' ')] line = line[line.find('", "if self.get_load().ramp_up: self.widget.duration += self.get_load().ramp_up # because we have ramp-down", 
"\"grinder-bzt-%s\" % id(self) self.properties_file = None self.kpi_file = None self.cmd_line", "worker_id, lnum, r_code, bytes_count): url = '' error_msg = None", "as fds: self.__write_base_props(fds) self.__write_scenario_props(fds, scenario) self.__write_bzt_props(fds) self.kpi_file = os.path.join(self.engine.artifacts_dir, self.exec_id", "import os import re import time from bzt import TaurusConfigError,", "' '.join(line_parts[6:]) self.test_names[test_id] = test_name self.log.debug(\"Recognized test id %s =>", "scenario file props to fds :param fds: :param scenario: dict", "in enumerate(header_list): self.idx[field.strip()] = _ix data_fields, worker_id = self.__split(line) if", "the Apache License, Version 2.0 (the \"License\"); you may not", "or implied. See the License for the specific language governing", "load.steps) fds.write(\"taurus.hold_for=%s\\n\" % load.hold) fds.write(\"taurus.iterations=%s\\n\" % load.iterations) fds.write(\"# BZT Properies", "class GrinderScriptBuilder(PythonGenerator): IMPORTS = \"\"\" from net.grinder.script import Test from", "KIND, either express or implied. See the License for the", "% script_props_file) with open(script_props_file) as spf: fds.write(spf.read()) fds.write(\"# Script Properies", "True)) self.root.append(self.gen_statement(\"defaults.setUseCookies(%s)\" % cookie_flag, indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_runner_class()) @staticmethod def __list_to_nvpair_list(items):", "self.gen_method_definition(\"__call__\", [\"self\"]) main_method.append(self.gen_statement(\"self.rampUpSleeper()\")) for req in self.scenario.get_requests(): if not isinstance(req,", "= list(self.file.get_lines(size=1024 * 1024, last_pass=last_pass)) lnum = None start =", "source_id, bytes_count def __split(self, line): if not line.endswith(\"\\n\"): self.partial_buffer +=", "check(self): \"\"\" Checks if tool is still running. 
Also checks", "case you need them\\n\") fds.write(\"taurus.concurrency=%s\\n\" % load.concurrency) fds.write(\"taurus.throughput=%s\\n\" % load.throughput)", "Python script for Grinder\") self.root.append(self.gen_comment(\"This script was generated by Taurus\",", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "for _ix, field in enumerate(header_list): self.idx[field.strip()] = _ix data_fields, worker_id", "= grinder_version base_link = self.MIRRORS_SOURCE.format(version=self.grinder_version) super(GrinderMirrorsManager, self).__init__(http_client, base_link, parent_logger) def", "for %sms' % sleep_time)\")) sleep_method.append(self.gen_statement(\"else: grinder.logger.info('No sleep needed')\")) sleep_method.append(self.gen_new_line()) runner_classdef.append(sleep_method)", "and str(bytes_count) == matched.group(5): return matched.group(2), matched.group(4) return url, error_msg", "= self.test_names[test_id] else: label = \"Test #%s\" % test_id source_id", "-> (\\S+) (.+), (\\d+) bytes\") def __init__(self, filename, parent_logger): super(DataLogReader,", "(key, val)) fds.write(\"# Scenario Properies End\\n\\n\") def __write_bzt_props(self, fds): \"\"\"", "worker_id = self.__split(line) if not data_fields: self.log.debug(\"Skipping line: %s\", line.strip())", "required_tools = [self._get_tool(TclLibrary), self._get_tool(JavaVM), self.java_helper, grinder] for tool in required_tools:", "100, 0), lnum)): # looking max 100 lines back. 
TODO:", "global_timeout = dehumanize_time(self.scenario.get(\"timeout\", None)) if global_timeout: self.root.append(self.gen_statement(\"defaults.setTimeout(%s)\" % int(global_timeout *", "establish connection\"]]) / 1000.0 bytes_count = int(data_fields[self.idx[\"HTTP response length\"]].strip()) test_id", "(header, value) for header, value in items) + \"]\" def", "'starting,': # self.concurrency += 1 pass elif line_parts[1] == 'finished':", "self.call([\"java\", \"-classpath\", self.tool_path, \"net.grinder.Grinder\"]) if err: out += err self.log.debug(\"%s", "fds.write(\"# Scenario Properies Start\\n\") for key, val in iteritems(local_props): fds.write(\"%s=%s\\n\"", "first byte\"]]) / 1000.0 r_code = data_fields[self.idx[\"HTTP response code\"]].strip() con_time", "\"\"\" Collect data file artifact \"\"\" if self.kpi_file: self.engine.existing_artifact(self.kpi_file) super(GrinderExecutor,", "the License for the specific language governing permissions and limitations", "implied. See the License for the specific language governing permissions", "time (ms since Epoch)\"]]) / 1000.0) r_time = int(data_fields[self.idx[\"Test time\"]])", "ToolError(\"Unable to run %s after installation!\" % self.tool_name) class GrinderMirrorsManager(MirrorsManager):", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "FileLister, HavingInstallableTools, SelfDiagnosable from bzt.modules.aggregator import ConsolidatingAggregator, ResultsReader from bzt.modules.console", "= time.time() for lnum, line in enumerate(self.lines): if not self.idx:", "# todo: take it from maven and convert to JarTool(?)", "= get_full_path(grinder_path) download_link = settings.get(\"download-link\", \"\") super(Grinder, self).__init__(tool_path=grinder_path, download_link=download_link, **kwargs)", "+ 1) / duration) def parse_line(self, data_fields, worker_id, lnum): worker_id", "if contents.strip(): diagnostics.append(\"Grinder STDOUT:\\n\" + contents) if self.stderr is not", 
"fds.write(\"# taurus load values in case you need them\\n\") fds.write(\"taurus.concurrency=%s\\n\"", "self.report_by_url = False self.log = parent_logger.getChild(self.__class__.__name__) self.file = FileReader(filename=filename, parent_logger=self.log)", "for header, value in items) + \"]\" def gen_runner_class(self): runner_classdef", "req.method.upper() url = req.url local_headers = req.headers params = \"[]\"", "http_client, parent_logger, grinder_version): self.grinder_version = grinder_version base_link = self.MIRRORS_SOURCE.format(version=self.grinder_version) super(GrinderMirrorsManager,", "self.root.append(self.gen_statement(\"utilities = HTTPPluginControl.getHTTPUtilities()\", indent=0)) headers = self.scenario.get_headers() if not self.scenario.get(\"keepalive\",", "byte\"]]) / 1000.0 r_code = data_fields[self.idx[\"HTTP response code\"]].strip() con_time =", "\"\" line = line.strip() if not line.startswith('data.'): line_parts = line.split('", "= \"Java exception calling TestRunner\" else: error_msg = None #", "% self.exec_id) fds.write(\"grinder.script=%s\\n\" % self.script.replace(os.path.sep, \"/\")) fds.write(\"grinder.logDirectory=%s\\n\" % self.engine.artifacts_dir.replace(os.path.sep, \"/\"))", "start the tool as fast as possible. 
\"\"\" self.env.set({\"T_GRINDER_PREFIX\": self.exec_id})", "shutdown_process(self.process, self.log) if self.start_time: self.end_time = time.time() self.log.debug(\"Grinder worked for", "if default_address else \"\" self.root.append(self.gen_statement('request = HTTPRequest(%s)' % url_arg, indent=0))", "writing, software distributed under the License is distributed on an", "fds.write(\"# Script Properies File End: %s\\n\\n\" % script_props_file) # scenario", "self.scenario.get(\"keepalive\", True): headers['Connection'] = 'close' if headers: self.root.append(self.gen_statement(\"defaults.setDefaultHeaders([\", indent=0)) for", "False self.log = parent_logger.getChild(self.__class__.__name__) self.file = FileReader(filename=filename, parent_logger=self.log) self.idx =", "self._get_tool(Grinder, config=self.settings) self.settings[\"path\"] = grinder.tool_path self.java_helper = self._get_tool(TaurusJavaHelper) required_tools =", "if self.concurrency > 0: self.concurrency -= 1 elif set(line_parts[1:5]) ==", "if self.stdout is not None: with open(self.stdout.name) as fds: contents", "label = self.test_names[test_id] else: label = \"Test #%s\" % test_id", "in compliance with the License. 
You may obtain a copy", "error_msg = \"Java exception calling TestRunner\" else: error_msg = None", "\"http://sourceforge.net/projects/grinder/files/The%20Grinder%203/{version}/grinder-{version}\" \\ \"-binary.zip/download?use_mirror={mirror}\" li_search_pattern = re.compile(r'<li id=\".*?\">') li_elements = li_search_pattern.findall(self.page_source)", "to first byte\"]]) / 1000.0 r_code = data_fields[self.idx[\"HTTP response code\"]].strip()", "\"w\") self.stderr = open(self.engine.create_artifact(\"grinder\", \".err\"), \"w\") self.install_required_tools() scenario = self.get_scenario()", "line): if not line.endswith(\"\\n\"): self.partial_buffer += line return None, None", "base_props_file) # base props base_props = self.settings.get(\"properties\") if base_props: fds.write(\"#", "it. \"\"\" shutdown_process(self.process, self.log) if self.start_time: self.end_time = time.time() self.log.debug(\"Grinder", "duration < 0.001: duration = 0.001 self.log.debug(\"Log reading speed: %s", "\"%203/{version}/grinder-{version}-binary.zip&dialog=true\" DOWNLOAD_LINK = \"https://downloads.sourceforge.net/project/grinder/The%20Grinder%203/{version}\" \\ \"/grinder-{version}-binary.zip?r=&ts=\" + str(int(time.time())) + \"&use_mirror=autoselect\"", "contains any data and throws exception otherwise. 
:return: bool :raise", "1024, last_pass=last_pass)) lnum = None start = time.time() for lnum,", "raise ToolError(\"Gatling tool exited with non-zero code: %s\" % self.retcode,", "err = self.call([\"java\", \"-classpath\", self.tool_path, \"net.grinder.Grinder\"]) if err: out +=", "open(base_props_file) as bpf: fds.write(bpf.read()) fds.write(\"# Base Properies File End: %s\\n\\n\"", "\"\"\" def __init__(self, scenario, parent_logger): super(GrinderScriptBuilder, self).__init__(scenario, parent_logger) self.label =", "%r),\" % (header, value), indent=4)) self.root.append(self.gen_statement(\"])\", indent=0)) global_timeout = dehumanize_time(self.scenario.get(\"timeout\",", "raise TaurusConfigError(msg) self.properties_file = self.engine.create_artifact(\"grinder\", \".properties\") with open(self.properties_file, 'w') as", "think_time = dehumanize_time(req.priority_option('think-time')) if think_time: main_method.append(self.gen_statement(\"grinder.sleep(%s)\" % int(think_time * 1000)))", "either express or implied. 
See the License for the specific", "self.process.poll() if self.retcode is not None: if self.retcode != 0:", "%s\", self.tool_name, dest) grinder_dist = self._download(use_link=bool(self.download_link)) self.log.info(\"Unzipping %s\", grinder_dist) unzip(grinder_dist,", "error_msg = None for lineNo in reversed(range(max(lnum - 100, 0),", "\"License\"); you may not use this file except in compliance", "generator doesn't support '%s' blocks, skipping\" self.log.warning(msg, req.NAME) continue method", "script for Grinder\") self.root.append(self.gen_comment(\"This script was generated by Taurus\", indent=0))", "except CALL_PROBLEMS as exc: self.log.warning(\"%s check failed: %s\", self.tool_name, exc)", "RESOURCES_DIR}, finish=True) self.env.add_path({\"CLASSPATH\": self.java_helper.tool_path}, finish=True) self.env.add_path({\"CLASSPATH\": self.settings.get(\"path\", None)}, finish=True) self.cmd_line", "%r)\" % (header, value) for header, value in items) +", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "maven and convert to JarTool(?) VERSION = \"3.11\" LOCAL_PATH =", "self.exec_id + \"-kpi.log\") self.reader = DataLogReader(self.kpi_file, self.log) self.reader.report_by_url = self.settings.get(\"report-by-url\",", "contents.strip(): diagnostics.append(\"Grinder STDOUT:\\n\" + contents) if self.stderr is not None:", "r_code, bytes_count) if int(data_fields[self.idx[\"Errors\"]]) or int(data_fields[self.idx['HTTP response errors']]): if not", "maybe use worker_id somehow? 
return t_stamp, label, self.concurrency, r_time, con_time,", "self.end_time = time.time() self.log.debug(\"Grinder worked for %s seconds\", self.end_time -", "non-zero code: %s\" % self.retcode, self.get_error_diagnostics()) return True return False", "self.tool_name, self.tool_path) try: out, err = self.call([\"java\", \"-classpath\", self.tool_path, \"net.grinder.Grinder\"])", "line.strip()) continue yield self.parse_line(data_fields, worker_id, lnum) if lnum is not", "self.exec_id = self.label self.script = self.get_script_path() if not self.script: if", "load.iterations) fds.write(\"# BZT Properies End\\n\") def prepare(self): self.stdout = open(self.engine.create_artifact(\"grinder\",", "FileReader(filename=filename, parent_logger=self.log) self.idx = {} self.partial_buffer = \"\" self.start_time =", "len(line_parts) > 1: if line_parts[1] == 'starting,': # self.concurrency +=", "'):] header_list = line.strip().split(self.DELIMITER) for _ix, field in enumerate(header_list): self.idx[field.strip()]", "= \"%s%s\" % (self.partial_buffer, line) self.partial_buffer = \"\" line =", "scenario.get(\"properties\") if local_props: fds.write(\"# Scenario Properies Start\\n\") for key, val", "resource_files.append(prop_file) return resource_files def get_error_diagnostics(self): diagnostics = [] if self.stdout", "governing permissions and limitations under the License. 
\"\"\" import os", "def __split(self, line): if not line.endswith(\"\\n\"): self.partial_buffer += line return", "and r_code == matched.group(3) and str(bytes_count) == matched.group(5): return matched.group(2),", "results...\") self.lines = list(self.file.get_lines(size=1024 * 1024, last_pass=last_pass)) lnum = None", "None worker_id = line[:line.find(' ')] line = line[line.find(' '):] data_fields", "self.log.debug(\"Log reading speed: %s lines/s\", (lnum + 1) / duration)", "Test from net.grinder.script.Grinder import grinder from net.grinder.plugin.http import HTTPRequest, HTTPPluginControl,", "= worker_id + '/' + data_fields[self.idx[\"Thread\"]].strip() if thread_id not in", "errors if self.report_by_url: label = url elif test_id in self.test_names:", "self.log.warning(\"%s check failed: %s\", self.tool_name, exc) return False def install(self):", "with open(self.stdout.name) as fds: contents = fds.read().strip() if contents.strip(): diagnostics.append(\"Grinder", "its generation \" msg += \"to run Grinder tool (%s)\"", "%s\\n\\n\" % script_props_file) # scenario props local_props = scenario.get(\"properties\") if", "test_name) return None, None worker_id = line[:line.find(' ')] line =", "\"[]\" headers = self.__list_to_nvpair_list(iteritems(local_headers)) main_method.append(self.gen_statement(\"request.%s(%r, %s, %s)\" % (method, url,", "self.end_time - self.start_time) def post_process(self): \"\"\" Collect data file artifact", "req.NAME) continue method = req.method.upper() url = req.url local_headers =", "grinder successfully\") if not self.check_if_installed(): raise ToolError(\"Unable to run %s", "any data and throws exception otherwise. 
:return: bool :raise TaurusToolError:", "= \"https://sourceforge.net/settings/mirror_choices?projectname=grinder&filename=The%20Grinder\" \\ \"%203/{version}/grinder-{version}-binary.zip&dialog=true\" DOWNLOAD_LINK = \"https://downloads.sourceforge.net/project/grinder/The%20Grinder%203/{version}\" \\ \"/grinder-{version}-binary.zip?r=&ts=\" +", "for lnum, line in enumerate(self.lines): if not self.idx: if not", "get_error_diagnostics(self): diagnostics = [] if self.stdout is not None: with", "id=\".*?\">') li_elements = li_search_pattern.findall(self.page_source) if li_elements: links = [base_link.format(version=self.grinder_version, mirror=link.strip('<li", "grinder_dist = self._download(use_link=bool(self.download_link)) self.log.info(\"Unzipping %s\", grinder_dist) unzip(grinder_dist, dest, 'grinder-' +", "be a script file or requests for its generation \"", "= 0.001 self.log.debug(\"Log reading speed: %s lines/s\", (lnum + 1)", "self.execute(self.cmd_line) def check(self): \"\"\" Checks if tool is still running.", "Requests\" def build_source_code(self): self.log.debug(\"Generating Python script for Grinder\") self.root.append(self.gen_comment(\"This script", "0)/tprops.getInt('concurrency', 1)\")) sleep_method.append(self.gen_statement(\"sleep_time = int(1000 * grinder.threadNumber * inc)\")) sleep_method.append(self.gen_statement(\"grinder.sleep(sleep_time,", "line.strip().split(self.DELIMITER) for _ix, field in enumerate(header_list): self.idx[field.strip()] = _ix data_fields,", "self.label, indent=0)) self.root.append(self.gen_statement('test.record(request)', indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_statement(\"defaults = HTTPPluginControl.getConnectionDefaults()\", indent=0)) self.root.append(self.gen_statement(\"utilities", "None self.kpi_file = None self.cmd_line = None self.process = None", "exc) return False def install(self): dest = get_full_path(self.tool_path, step_up=2) self.log.info(\"Will", "self.partial_buffer 
= \"\" line = line.strip() if not line.startswith('data.'): line_parts", "__init__(self, filename, parent_logger): super(DataLogReader, self).__init__() self.report_by_url = False self.log =", "settings.get(\"download-link\", \"\") super(Grinder, self).__init__(tool_path=grinder_path, download_link=download_link, **kwargs) self.version = self.VERSION self.mirror_manager", "for %s seconds\", self.end_time - self.start_time) def post_process(self): \"\"\" Collect", "\"net.grinder.Grinder\"]) if err: out += err self.log.debug(\"%s stdout: %s\", self.tool_name,", "lnum, r_code, bytes_count): url = '' error_msg = None for", "properties and base properties file contents to fds :param fds:", "\"Grinder script generator doesn't support '%s' blocks, skipping\" self.log.warning(msg, req.NAME)", "data and throws exception otherwise. :return: bool :raise TaurusToolError: \"\"\"", "self.scenario.get(\"default-address\") url_arg = \"url=%r\" % default_address if default_address else \"\"", "load.ramp_up) fds.write(\"taurus.steps=%s\\n\" % load.steps) fds.write(\"taurus.hold_for=%s\\n\" % load.hold) fds.write(\"taurus.iterations=%s\\n\" % load.iterations)", "'0': error_msg = \"HTTP %s\" % r_code else: error_msg =", "% base_props_file) # base props base_props = self.settings.get(\"properties\") if base_props:", "in iteritems(headers): self.root.append(self.gen_statement(\"NVPair(%r, %r),\" % (header, value), indent=4)) self.root.append(self.gen_statement(\"])\", indent=0))", "= \"\" self.start_time = 0 self.end_time = 0 self.concurrency =", "matched: continue if worker_id == matched.group(1) and r_code == matched.group(3)", "self.widget = ExecutorWidget(self, label) if self.get_load().ramp_up: self.widget.duration += self.get_load().ramp_up #", "method = req.method.upper() url = req.url local_headers = req.headers params", "contents = fds.read().strip() if contents.strip(): diagnostics.append(\"Grinder STDOUT:\\n\" + contents) if", "= self.get_script_path() if not self.script: if 
\"requests\" in scenario: self.script", "Should start the tool as fast as possible. \"\"\" self.env.set({\"T_GRINDER_PREFIX\":", "self.get_load().ramp_up # because we have ramp-down equal to rampup return", "bytes_count) if int(data_fields[self.idx[\"Errors\"]]) or int(data_fields[self.idx['HTTP response errors']]): if not error_msg:", "test name records continue line = line[line.find(' '):] header_list =", "contents = fds.read().strip() if contents.strip(): diagnostics.append(\"Grinder STDOUT:\\n\" + contents) return", "fds.write(\"# Base Properies File Start: %s\\n\" % base_props_file) with open(base_props_file)", "= self.process.poll() if self.retcode is not None: if self.retcode !=", "\".out\"), \"w\") self.stderr = open(self.engine.create_artifact(\"grinder\", \".err\"), \"w\") self.install_required_tools() scenario =", "@staticmethod def __list_to_nvpair_list(items): return \"[\" + \",\".join(\"NVPair(%r, %r)\" % (header,", "sleep_method.append(self.gen_statement(\"sleep_time = int(1000 * grinder.threadNumber * inc)\")) sleep_method.append(self.gen_statement(\"grinder.sleep(sleep_time, 0)\")) sleep_method.append(self.gen_statement(\"if", "MirrorsManager, dehumanize_time, get_full_path, PythonGenerator, CALL_PROBLEMS from bzt.utils import unzip, RequiredTool,", "(\\d+) bytes\") def __init__(self, filename, parent_logger): super(DataLogReader, self).__init__() self.report_by_url =", "line = line[line.find(' '):] header_list = line.strip().split(self.DELIMITER) for _ix, field", "= GrinderScriptBuilder(self.get_scenario(), self.log) builder.label = self.label builder.build_source_code() builder.save(script) return script", "diagnostics = [] if self.stdout is not None: with open(self.stdout.name)", "= self.lines[lineNo].strip() matched = self.DETAILS_REGEX.match(line) if not matched: continue if", "def shutdown(self): \"\"\" If tool is still running - let's", "fds: :return: \"\"\" fds.write(\"# BZT Properies Start\\n\") fds.write(\"grinder.hostID=%s\\n\" % 
self.exec_id)", "self.stdout = open(self.engine.create_artifact(\"grinder\", \".out\"), \"w\") self.stderr = open(self.engine.create_artifact(\"grinder\", \".err\"), \"w\")", "% load.concurrency) fds.write(\"taurus.throughput=%s\\n\" % load.throughput) fds.write(\"taurus.ramp_up=%s\\n\" % load.ramp_up) fds.write(\"taurus.steps=%s\\n\" %", "if not self.script: if \"requests\" in scenario: self.script = self.__scenario_from_requests()", "t_stamp = int(int(data_fields[self.idx[\"Start time (ms since Epoch)\"]]) / 1000.0) r_time", "must be a script file or requests for its generation", "headers = self.scenario.get_headers() if not self.scenario.get(\"keepalive\", True): headers['Connection'] = 'close'", "label, self.concurrency, r_time, con_time, latency, r_code, error_msg, source_id, bytes_count def", "% base_props_file) with open(base_props_file) as bpf: fds.write(bpf.read()) fds.write(\"# Base Properies", "install %s into %s\", self.tool_name, dest) grinder_dist = self._download(use_link=bool(self.download_link)) self.log.info(\"Unzipping", "grinder from net.grinder.plugin.http import HTTPRequest, HTTPPluginControl, HTTPUtilities from HTTPClient import", "RESOURCES_DIR class GrinderExecutor(ScenarioExecutor, WidgetProvider, FileLister, HavingInstallableTools, SelfDiagnosable): \"\"\" Grinder executor", "self.root.append(self.gen_statement(\"])\", indent=0)) global_timeout = dehumanize_time(self.scenario.get(\"timeout\", None)) if global_timeout: self.root.append(self.gen_statement(\"defaults.setTimeout(%s)\" %", "builder.build_source_code() builder.save(script) return script def install_required_tools(self): grinder = self._get_tool(Grinder, config=self.settings)", "= open(self.engine.create_artifact(\"grinder\", \".out\"), \"w\") self.stderr = open(self.engine.create_artifact(\"grinder\", \".err\"), \"w\") self.install_required_tools()", "specific language governing permissions and limitations under the License. 
\"\"\"", "self.root.append(self.gen_comment(\"This script was generated by Taurus\", indent=0)) self.root.append(self.add_imports()) self.root.append(self.gen_new_line()) default_address", "self.scenario.get_headers() if not self.scenario.get(\"keepalive\", True): headers['Connection'] = 'close' if headers:", "= [self._get_tool(TclLibrary), self._get_tool(JavaVM), self.java_helper, grinder] for tool in required_tools: if", "applicable law or agreed to in writing, software distributed under", "last_pass: \"\"\" self.log.debug(\"Reading grinder results...\") self.lines = list(self.file.get_lines(size=1024 * 1024,", "data_fields, worker_id = self.__split(line) if not data_fields: self.log.debug(\"Skipping line: %s\",", "self.kpi_file: self.engine.existing_artifact(self.kpi_file) super(GrinderExecutor, self).post_process() def __scenario_from_requests(self): \"\"\" Generate grinder scenario", "test_id = line_parts[5][:-1] test_name = ' '.join(line_parts[6:]) self.test_names[test_id] = test_name", "HTTPRequest): msg = \"Grinder script generator doesn't support '%s' blocks,", "= open(self.engine.create_artifact(\"grinder\", \".err\"), \"w\") self.install_required_tools() scenario = self.get_scenario() self.exec_id =", "= \"Test #%s\" % test_id source_id = '' # maybe", "\",\" DETAILS_REGEX = re.compile(r\"worker\\.(\\S+) (.+) -> (\\S+) (.+), (\\d+) bytes\")", "with non-zero code: %s\" % self.retcode, self.get_error_diagnostics()) return True return", "self.java_helper, grinder] for tool in required_tools: if not tool.check_if_installed(): tool.install()", "self.log.debug('Total mirrors: %d', len(links)) return links class GrinderScriptBuilder(PythonGenerator): IMPORTS =", "= [] if self.stdout is not None: with open(self.stdout.name) as", "\",\".join(\"NVPair(%r, %r)\" % (header, value) for header, value in items)", "headers = self.__list_to_nvpair_list(iteritems(local_headers)) main_method.append(self.gen_statement(\"request.%s(%r, %s, %s)\" % (method, url, params,", 
"reversed(range(max(lnum - 100, 0), lnum)): # looking max 100 lines", "_ix, field in enumerate(header_list): self.idx[field.strip()] = _ix data_fields, worker_id =", "\"to run Grinder tool (%s)\" % self.execution.get('scenario') raise TaurusConfigError(msg) self.properties_file", "OF ANY KIND, either express or implied. See the License", "/ duration) def parse_line(self, data_fields, worker_id, lnum): worker_id = worker_id.split('.')[1]", "=> %s\", test_id, test_name) return None, None worker_id = line[:line.find('", "% load.iterations) fds.write(\"# BZT Properies End\\n\") def prepare(self): self.stdout =", "not None: label = \"Grinder: %s\" % os.path.basename(self.script) else: label", "int(data_fields[self.idx[\"Errors\"]]) or int(data_fields[self.idx['HTTP response errors']]): if not error_msg: if r_code", "indent=0)) self.root.append(self.gen_statement(\"utilities = HTTPPluginControl.getHTTPUtilities()\", indent=0)) headers = self.scenario.get_headers() if not", "to rampup return self.widget def resource_files(self): resource_files = [] script_file_path", "raise ToolError(\"Unable to run %s after installation!\" % self.tool_name) class", "url = req.url local_headers = req.headers params = \"[]\" headers", "%s seconds\", self.end_time - self.start_time) def post_process(self): \"\"\" Collect data", "load.iterations or 0) if load.concurrency: fds.write(\"grinder.threads=%s\\n\" % load.concurrency) if load.duration:", "tool (%s)\" % self.execution.get('scenario') raise TaurusConfigError(msg) self.properties_file = self.engine.create_artifact(\"grinder\", \".properties\")", "grinder] for tool in required_tools: if not tool.check_if_installed(): tool.install() def", "import iteritems from bzt.utils import MirrorsManager, dehumanize_time, get_full_path, PythonGenerator, CALL_PROBLEMS", "self.log.debug(\"%s stdout: %s\", self.tool_name, out) return True except CALL_PROBLEMS as", "load.hold) fds.write(\"taurus.iterations=%s\\n\" % load.iterations) fds.write(\"# BZT 
Properies End\\n\") def prepare(self):", "speed: %s lines/s\", (lnum + 1) / duration) def parse_line(self,", "exited with non-zero code: %s\" % self.retcode, self.get_error_diagnostics()) return True", "0.001 self.log.debug(\"Log reading speed: %s lines/s\", (lnum + 1) /", "fds, scenario): \"\"\" Write scenario props and scenario file props", "= re.compile(r\"worker\\.(\\S+) (.+) -> (\\S+) (.+), (\\d+) bytes\") def __init__(self,", "ramp-down equal to rampup return self.widget def resource_files(self): resource_files =", "ToolError from bzt.engine import ScenarioExecutor, FileLister, HavingInstallableTools, SelfDiagnosable from bzt.modules.aggregator", "def __init__(self, http_client, parent_logger, grinder_version): self.grinder_version = grinder_version base_link =", "\" msg += \"to run Grinder tool (%s)\" % self.execution.get('scenario')", "(method, url, params, headers))) think_time = dehumanize_time(req.priority_option('think-time')) if think_time: main_method.append(self.gen_statement(\"grinder.sleep(%s)\"", "resource_files = [] script_file_path = self.get_script_path() if script_file_path: resource_files.append(script_file_path) prop_file", "time\"]]) / 1000.0 latency = int(data_fields[self.idx[\"Time to first byte\"]]) /", "rampup return self.widget def resource_files(self): resource_files = [] script_file_path =", "file props to fds :param fds: :param scenario: dict :return:", "scenario: self.script = self.__scenario_from_requests() else: msg = \"There must be", "import grinder from net.grinder.plugin.http import HTTPRequest, HTTPPluginControl, HTTPUtilities from HTTPClient", "gen_runner_class(self): runner_classdef = self.gen_class_definition(\"TestRunner\", [\"object\"]) sleep_method = self.gen_method_definition(\"rampUpSleeper\", [\"self\"]) sleep_method.append(self.gen_statement(\"if", "r_time, con_time, latency, r_code, error_msg, source_id, bytes_count def __split(self, line):", "import HTTPRequest from bzt.six import iteritems from bzt.utils import 
MirrorsManager,", "processes (logback-worker.xml) self.env.add_path({\"CLASSPATH\": RESOURCES_DIR}, finish=True) self.env.add_path({\"CLASSPATH\": self.java_helper.tool_path}, finish=True) self.env.add_path({\"CLASSPATH\": self.settings.get(\"path\",", "otherwise. :return: bool :raise TaurusToolError: \"\"\" self.retcode = self.process.poll() if", "__write_scenario_props(self, fds, scenario): \"\"\" Write scenario props and scenario file", "= self.engine.create_artifact(\"grinder\", \".properties\") with open(self.properties_file, 'w') as fds: self.__write_base_props(fds) self.__write_scenario_props(fds,", "line_parts[5][:-1] test_name = ' '.join(line_parts[6:]) self.test_names[test_id] = test_name self.log.debug(\"Recognized test", "exception calling TestRunner\" else: error_msg = None # suppress errors", "+= 1 pass elif line_parts[1] == 'finished': if self.concurrency >", "self.tool_name, dest) grinder_dist = self._download(use_link=bool(self.download_link)) self.log.info(\"Unzipping %s\", grinder_dist) unzip(grinder_dist, dest,", "Taurus\", indent=0)) self.root.append(self.add_imports()) self.root.append(self.gen_new_line()) default_address = self.scenario.get(\"default-address\") url_arg = \"url=%r\"", "TclLibrary, FileReader, RESOURCES_DIR class GrinderExecutor(ScenarioExecutor, WidgetProvider, FileLister, HavingInstallableTools, SelfDiagnosable): \"\"\"", "BZT Properies Start\\n\") fds.write(\"grinder.hostID=%s\\n\" % self.exec_id) fds.write(\"grinder.script=%s\\n\" % self.script.replace(os.path.sep, \"/\"))", "None self.widget = ExecutorWidget(self, label) if self.get_load().ramp_up: self.widget.duration += self.get_load().ramp_up", "return self.widget def resource_files(self): resource_files = [] script_file_path = self.get_script_path()", "/ 1000.0 latency = int(data_fields[self.idx[\"Time to first byte\"]]) / 1000.0", "self.known_threads: self.known_threads.add(thread_id) self.concurrency += 1 url, error_msg = self.__parse_prev_lines(worker_id, lnum,", 
"grinder.properties.getPropertySubset('taurus.')\")) sleep_method.append(self.gen_statement(\"inc = tprops.getDouble('ramp_up', 0)/tprops.getInt('concurrency', 1)\")) sleep_method.append(self.gen_statement(\"sleep_time = int(1000 *", "Apache License, Version 2.0 (the \"License\"); you may not use", "main_method.append(self.gen_statement(\"request.%s(%r, %s, %s)\" % (method, url, params, headers))) think_time =", "start if duration < 0.001: duration = 0.001 self.log.debug(\"Log reading", "(%s)\" % self.execution.get('scenario') raise TaurusConfigError(msg) self.properties_file = self.engine.create_artifact(\"grinder\", \".properties\") with", "bool :raise TaurusToolError: \"\"\" self.retcode = self.process.poll() if self.retcode is", "Grinder executor module \"\"\" def __init__(self): super(GrinderExecutor, self).__init__() self.script =", "if not self.idx: if not line.startswith('data.'): self.__split(line) # to capture", "sleep_method.append(self.gen_statement(\"if sleep_time: grinder.logger.info('slept for %sms' % sleep_time)\")) sleep_method.append(self.gen_statement(\"else: grinder.logger.info('No sleep", "%s\", self.tool_name, self.tool_path) try: out, err = self.call([\"java\", \"-classpath\", self.tool_path,", "TaurusConfigError, ToolError from bzt.engine import ScenarioExecutor, FileLister, HavingInstallableTools, SelfDiagnosable from", "\"\"\" Generate grinder scenario from requests :return: script \"\"\" script", "blocks, skipping\" self.log.warning(msg, req.NAME) continue method = req.method.upper() url =", "configurations used by worker processes (logback-worker.xml) self.env.add_path({\"CLASSPATH\": RESOURCES_DIR}, finish=True) self.env.add_path({\"CLASSPATH\":", "self.report_by_url: label = url elif test_id in self.test_names: label =", "self.env.set({\"T_GRINDER_PREFIX\": self.exec_id}) self.process = self.execute(self.cmd_line) def check(self): \"\"\" Checks if", "import NVPair \"\"\" def __init__(self, scenario, parent_logger): 
super(GrinderScriptBuilder, self).__init__(scenario, parent_logger)", "scenario = self.get_scenario() self.exec_id = self.label self.script = self.get_script_path() if", "self.script = None self.exec_id = \"grinder-bzt-%s\" % id(self) self.properties_file =", "% (header, value), indent=4)) self.root.append(self.gen_statement(\"])\", indent=0)) global_timeout = dehumanize_time(self.scenario.get(\"timeout\", None))", "int(data_fields[self.idx[\"HTTP response length\"]].strip()) test_id = data_fields[self.idx[\"Test\"]].strip() thread_id = worker_id +", "= time.time() - start if duration < 0.001: duration =", "thread_id not in self.known_threads: self.known_threads.add(thread_id) self.concurrency += 1 url, error_msg", "self._get_tool(JavaVM), self.java_helper, grinder] for tool in required_tools: if not tool.check_if_installed():", "import Test from net.grinder.script.Grinder import grinder from net.grinder.plugin.http import HTTPRequest,", "sleep_method.append(self.gen_statement(\"else: grinder.logger.info('No sleep needed')\")) sleep_method.append(self.gen_new_line()) runner_classdef.append(sleep_method) main_method = self.gen_method_definition(\"__call__\", [\"self\"])", "(.+), (\\d+) bytes\") def __init__(self, filename, parent_logger): super(DataLogReader, self).__init__() self.report_by_url", "self.widget.duration += self.get_load().ramp_up # because we have ramp-down equal to", "not in links: links.append(default_link) self.log.debug('Total mirrors: %d', len(links)) return links", "\"%s\")' % self.label, indent=0)) self.root.append(self.gen_statement('test.record(request)', indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_statement(\"defaults = HTTPPluginControl.getConnectionDefaults()\",", "fds.write(\"grinder.runs=%s\\n\" % load.iterations or 0) if load.concurrency: fds.write(\"grinder.threads=%s\\n\" % load.concurrency)", "Start: %s\\n\" % base_props_file) with open(base_props_file) as bpf: fds.write(bpf.read()) fds.write(\"#", 
"\"\"\" If tool is still running - let's stop it.", "generated by Taurus\", indent=0)) self.root.append(self.add_imports()) self.root.append(self.gen_new_line()) default_address = self.scenario.get(\"default-address\") url_arg", "import TaurusJavaHelper from bzt.requests_model import HTTPRequest from bzt.six import iteritems", "None def __write_base_props(self, fds): \"\"\" write base properties and base", "= self.DETAILS_REGEX.match(line) if not matched: continue if worker_id == matched.group(1)", "= None self.kpi_file = None self.cmd_line = None self.process =", "line_parts = line.split(' ') if len(line_parts) > 1: if line_parts[1]", "or 0) if load.concurrency: fds.write(\"grinder.threads=%s\\n\" % load.concurrency) if load.duration: fds.write(\"grinder.duration=%s\\n\"", "with open(script_props_file) as spf: fds.write(spf.read()) fds.write(\"# Script Properies File End:", "self.end_time = None self.retcode = None self.java_helper = None def", "# scenario props local_props = scenario.get(\"properties\") if local_props: fds.write(\"# Scenario", "sleep_method = self.gen_method_definition(\"rampUpSleeper\", [\"self\"]) sleep_method.append(self.gen_statement(\"if grinder.runNumber != 0: return\")) sleep_method.append(self.gen_statement(\"tprops", "matched = self.DETAILS_REGEX.match(line) if not matched: continue if worker_id ==", "need them\\n\") fds.write(\"taurus.concurrency=%s\\n\" % load.concurrency) fds.write(\"taurus.throughput=%s\\n\" % load.throughput) fds.write(\"taurus.ramp_up=%s\\n\" %", "Also checks if resulting logs contains any data and throws", "line.startswith('data.'): line_parts = line.split(' ') if len(line_parts) > 1: if", "self.settings[\"path\"] = grinder.tool_path self.java_helper = self._get_tool(TaurusJavaHelper) required_tools = [self._get_tool(TclLibrary), self._get_tool(JavaVM),", "not self.widget: if self.script is not None: label = \"Grinder:", "scenario: dict :return: \"\"\" script_props_file = scenario.get(\"properties-file\") if 
script_props_file: fds.write(\"#", "self.retcode is not None: if self.retcode != 0: raise ToolError(\"Gatling", "{} self.partial_buffer = \"\" self.start_time = 0 self.end_time = 0", "line = line[line.find(' '):] data_fields = line.split(self.DELIMITER) if not data_fields[1].strip().isdigit():", "time.time() self.log.debug(\"Grinder worked for %s seconds\", self.end_time - self.start_time) def", "1000.0 r_code = data_fields[self.idx[\"HTTP response code\"]].strip() con_time = int(data_fields[self.idx[\"Time to", "= DataLogReader(self.kpi_file, self.log) self.reader.report_by_url = self.settings.get(\"report-by-url\", False) if isinstance(self.engine.aggregator, ConsolidatingAggregator):", "open(self.properties_file, 'w') as fds: self.__write_base_props(fds) self.__write_scenario_props(fds, scenario) self.__write_bzt_props(fds) self.kpi_file =", "% load.iterations or 0) if load.concurrency: fds.write(\"grinder.threads=%s\\n\" % load.concurrency) if", "if global_timeout: self.root.append(self.gen_statement(\"defaults.setTimeout(%s)\" % int(global_timeout * 1000), indent=0)) cookie_flag =", "def startup(self): \"\"\" Should start the tool as fast as", "fds.write(bpf.read()) fds.write(\"# Base Properies File End: %s\\n\\n\" % base_props_file) #", "msg = \"Grinder script generator doesn't support '%s' blocks, skipping\"", "continue method = req.method.upper() url = req.url local_headers = req.headers", "mirrors...') base_link = \"http://sourceforge.net/projects/grinder/files/The%20Grinder%203/{version}/grinder-{version}\" \\ \"-binary.zip/download?use_mirror={mirror}\" li_search_pattern = re.compile(r'<li id=\".*?\">')", "li_search_pattern.findall(self.page_source) if li_elements: links = [base_link.format(version=self.grinder_version, mirror=link.strip('<li id=\"').strip('\">')) for link", "from maven and convert to JarTool(?) VERSION = \"3.11\" LOCAL_PATH", "BlazeMeter Inc. 
Licensed under the Apache License, Version 2.0 (the", "if script_props_file: fds.write(\"# Script Properies File Start: %s\\n\" % script_props_file)", "= FileReader(filename=filename, parent_logger=self.log) self.idx = {} self.partial_buffer = \"\" self.start_time", "to fds :param fds: fds :return: \"\"\" base_props_file = self.settings.get(\"properties-file\")", "li_elements: links = [base_link.format(version=self.grinder_version, mirror=link.strip('<li id=\"').strip('\">')) for link in li_elements]", "None self.java_helper = None def __write_base_props(self, fds): \"\"\" write base", "HavingInstallableTools, SelfDiagnosable from bzt.modules.aggregator import ConsolidatingAggregator, ResultsReader from bzt.modules.console import", "fds :param fds: :return: \"\"\" fds.write(\"# BZT Properies Start\\n\") fds.write(\"grinder.hostID=%s\\n\"", "get_full_path, PythonGenerator, CALL_PROBLEMS from bzt.utils import unzip, RequiredTool, JavaVM, shutdown_process,", "+ contents) return diagnostics class DataLogReader(ResultsReader): \"\"\" Class to read", "= ' '.join(line_parts[6:]) self.test_names[test_id] = test_name self.log.debug(\"Recognized test id %s", "File Start: %s\\n\" % script_props_file) with open(script_props_file) as spf: fds.write(spf.read())", "% load.ramp_up) fds.write(\"taurus.steps=%s\\n\" % load.steps) fds.write(\"taurus.hold_for=%s\\n\" % load.hold) fds.write(\"taurus.iterations=%s\\n\" %", "logback configurations used by worker processes (logback-worker.xml) self.env.add_path({\"CLASSPATH\": RESOURCES_DIR}, finish=True)", "open(self.engine.create_artifact(\"grinder\", \".err\"), \"w\") self.install_required_tools() scenario = self.get_scenario() self.exec_id = self.label", "worker_id def __parse_prev_lines(self, worker_id, lnum, r_code, bytes_count): url = ''", "import WidgetProvider, ExecutorWidget from bzt.modules.java import TaurusJavaHelper from bzt.requests_model import", "Scenario Properies End\\n\\n\") def __write_bzt_props(self, fds): \"\"\" 
Write bzt properties", "is not None: duration = time.time() - start if duration", "Start: %s\\n\" % script_props_file) with open(script_props_file) as spf: fds.write(spf.read()) fds.write(\"#", "not None: if self.retcode != 0: raise ToolError(\"Gatling tool exited", "data log \"\"\" DELIMITER = \",\" DETAILS_REGEX = re.compile(r\"worker\\.(\\S+) (.+)", "load.concurrency) fds.write(\"taurus.throughput=%s\\n\" % load.throughput) fds.write(\"taurus.ramp_up=%s\\n\" % load.ramp_up) fds.write(\"taurus.steps=%s\\n\" % load.steps)", "post_process(self): \"\"\" Collect data file artifact \"\"\" if self.kpi_file: self.engine.existing_artifact(self.kpi_file)", "+ '/' + data_fields[self.idx[\"Thread\"]].strip() if thread_id not in self.known_threads: self.known_threads.add(thread_id)", "if int(data_fields[self.idx[\"Errors\"]]) or int(data_fields[self.idx['HTTP response errors']]): if not error_msg: if", "= data_fields[self.idx[\"HTTP response code\"]].strip() con_time = int(data_fields[self.idx[\"Time to resolve host\"]])", "script_file_path: resource_files.append(script_file_path) prop_file = self.get_scenario().get(\"properties-file\") if prop_file: resource_files.append(prop_file) return resource_files", "for lineNo in reversed(range(max(lnum - 100, 0), lnum)): # looking", "self.partial_buffer = \"\" self.start_time = 0 self.end_time = 0 self.concurrency", "'' # maybe use worker_id somehow? 
return t_stamp, label, self.concurrency,", "dehumanize_time(self.scenario.get(\"timeout\", None)) if global_timeout: self.root.append(self.gen_statement(\"defaults.setTimeout(%s)\" % int(global_timeout * 1000), indent=0))", "config=self.settings) self.settings[\"path\"] = grinder.tool_path self.java_helper = self._get_tool(TaurusJavaHelper) required_tools = [self._get_tool(TclLibrary),", "[base_link.format(version=self.grinder_version, mirror=link.strip('<li id=\"').strip('\">')) for link in li_elements] default_link = self.DOWNLOAD_LINK.format(version=self.grinder_version)", "self.engine.aggregator.add_underling(self.reader) # add logback configurations used by worker processes (logback-worker.xml)", "+ contents) if self.stderr is not None: with open(self.stderr.name) as", "HTTPUtilities from HTTPClient import NVPair \"\"\" def __init__(self, scenario, parent_logger):", "fds.write(\"taurus.throughput=%s\\n\" % load.throughput) fds.write(\"taurus.ramp_up=%s\\n\" % load.ramp_up) fds.write(\"taurus.steps=%s\\n\" % load.steps) fds.write(\"taurus.hold_for=%s\\n\"", "/ 1000.0 bytes_count = int(data_fields[self.idx[\"HTTP response length\"]].strip()) test_id = data_fields[self.idx[\"Test\"]].strip()", "End\\n\\n\") def __write_bzt_props(self, fds): \"\"\" Write bzt properties to fds", "software distributed under the License is distributed on an \"AS", "base props base_props = self.settings.get(\"properties\") if base_props: fds.write(\"# Base Properies", "= settings.get(\"download-link\", \"\") super(Grinder, self).__init__(tool_path=grinder_path, download_link=download_link, **kwargs) self.version = self.VERSION", "self.file = FileReader(filename=filename, parent_logger=self.log) self.idx = {} self.partial_buffer = \"\"", "self.reader = DataLogReader(self.kpi_file, self.log) self.reader.report_by_url = self.settings.get(\"report-by-url\", False) if isinstance(self.engine.aggregator,", "bytes\") def __init__(self, filename, parent_logger): super(DataLogReader, 
self).__init__() self.report_by_url = False", "%s => %s\", test_id, test_name) return None, None worker_id =", "error_msg class Grinder(RequiredTool): # todo: take it from maven and", "resolve host\"]]) / 1000.0 con_time += int(data_fields[self.idx[\"Time to establish connection\"]])", "\"BZT Requests\" def build_source_code(self): self.log.debug(\"Generating Python script for Grinder\") self.root.append(self.gen_comment(\"This", "enumerate(self.lines): if not self.idx: if not line.startswith('data.'): self.__split(line) # to", "None # suppress errors if self.report_by_url: label = url elif", "load.concurrency: fds.write(\"grinder.threads=%s\\n\" % load.concurrency) if load.duration: fds.write(\"grinder.duration=%s\\n\" % int(load.duration *", "DataLogReader(ResultsReader): \"\"\" Class to read KPI from data log \"\"\"", "= \"BZT Requests\" def build_source_code(self): self.log.debug(\"Generating Python script for Grinder\")", "class Grinder(RequiredTool): # todo: take it from maven and convert", "= None self.exec_id = \"grinder-bzt-%s\" % id(self) self.properties_file = None", "\"requests\" in scenario: self.script = self.__scenario_from_requests() else: msg = \"There", "self.concurrency, r_time, con_time, latency, r_code, error_msg, source_id, bytes_count def __split(self,", "% cookie_flag, indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_runner_class()) @staticmethod def __list_to_nvpair_list(items): return \"[\"", "= grinder.properties.getPropertySubset('taurus.')\")) sleep_method.append(self.gen_statement(\"inc = tprops.getDouble('ramp_up', 0)/tprops.getInt('concurrency', 1)\")) sleep_method.append(self.gen_statement(\"sleep_time = int(1000", "grinder_dist) unzip(grinder_dist, dest, 'grinder-' + self.version) os.remove(grinder_dist) self.log.info(\"Installed grinder successfully\")", "as fds: contents = fds.read().strip() if contents.strip(): diagnostics.append(\"Grinder STDOUT:\\n\" +", "= config or {} grinder_path = 
settings.get(\"path\", self.LOCAL_PATH) grinder_path =", "self.engine.artifacts_dir.replace(os.path.sep, \"/\")) load = self.get_load() if load.iterations or load.concurrency: fds.write(\"grinder.runs=%s\\n\"", "key, val in iteritems(base_props): fds.write(\"%s=%s\\n\" % (key, val)) fds.write(\"# Base", "GrinderMirrorsManager(MirrorsManager): MIRRORS_SOURCE = \"https://sourceforge.net/settings/mirror_choices?projectname=grinder&filename=The%20Grinder\" \\ \"%203/{version}/grinder-{version}-binary.zip&dialog=true\" DOWNLOAD_LINK = \"https://downloads.sourceforge.net/project/grinder/The%20Grinder%203/{version}\" \\", "latency, r_code, error_msg, source_id, bytes_count def __split(self, line): if not", "dest) grinder_dist = self._download(use_link=bool(self.download_link)) self.log.info(\"Unzipping %s\", grinder_dist) unzip(grinder_dist, dest, 'grinder-'", "self.start_time = 0 self.end_time = 0 self.concurrency = 0 self.test_names", "scenario props local_props = scenario.get(\"properties\") if local_props: fds.write(\"# Scenario Properies", "links: links.append(default_link) self.log.debug('Total mirrors: %d', len(links)) return links class GrinderScriptBuilder(PythonGenerator):", "if line_parts[1] == 'starting,': # self.concurrency += 1 pass elif", "if not line.startswith('data.'): line_parts = line.split(' ') if len(line_parts) >", "return matched.group(2), matched.group(4) return url, error_msg class Grinder(RequiredTool): # todo:", "running. Also checks if resulting logs contains any data and", "self).__init__(tool_path=grinder_path, download_link=download_link, **kwargs) self.version = self.VERSION self.mirror_manager = GrinderMirrorsManager(self.http_client, self.log,", "= data_fields[self.idx[\"Test\"]].strip() thread_id = worker_id + '/' + data_fields[self.idx[\"Thread\"]].strip() if", "todo: take it from maven and convert to JarTool(?) 
VERSION", "self.log.debug(\"Generating Python script for Grinder\") self.root.append(self.gen_comment(\"This script was generated by", "needed')\")) sleep_method.append(self.gen_new_line()) runner_classdef.append(sleep_method) main_method = self.gen_method_definition(\"__call__\", [\"self\"]) main_method.append(self.gen_statement(\"self.rampUpSleeper()\")) for req", "sleep needed')\")) sleep_method.append(self.gen_new_line()) runner_classdef.append(sleep_method) main_method = self.gen_method_definition(\"__call__\", [\"self\"]) main_method.append(self.gen_statement(\"self.rampUpSleeper()\")) for", "if not self.widget: if self.script is not None: label =", "somehow? return t_stamp, label, self.concurrency, r_time, con_time, latency, r_code, error_msg,", "TaurusConfigError(msg) self.properties_file = self.engine.create_artifact(\"grinder\", \".properties\") with open(self.properties_file, 'w') as fds:", "\"]\" def gen_runner_class(self): runner_classdef = self.gen_class_definition(\"TestRunner\", [\"object\"]) sleep_method = self.gen_method_definition(\"rampUpSleeper\",", "__init__(self, config=None, **kwargs): settings = config or {} grinder_path =", "contents.strip(): diagnostics.append(\"Grinder STDOUT:\\n\" + contents) return diagnostics class DataLogReader(ResultsReader): \"\"\"", "if load.duration: fds.write(\"grinder.duration=%s\\n\" % int(load.duration * 1000)) fds.write(\"# taurus load", "= self.engine.create_artifact(\"grinder_requests\", \".py\") builder = GrinderScriptBuilder(self.get_scenario(), self.log) builder.label = self.label", "self.parse_line(data_fields, worker_id, lnum) if lnum is not None: duration =", "= self.MIRRORS_SOURCE.format(version=self.grinder_version) super(GrinderMirrorsManager, self).__init__(http_client, base_link, parent_logger) def _parse_mirrors(self): links =", "return resource_files def get_error_diagnostics(self): diagnostics = [] if self.stdout is", "None)}, finish=True) self.cmd_line = [\"java\", \"net.grinder.Grinder\", 
self.properties_file] def startup(self): \"\"\"", "__split(self, line): if not line.endswith(\"\\n\"): self.partial_buffer += line return None,", "= '' error_msg = None for lineNo in reversed(range(max(lnum -", "') if len(line_parts) > 1: if line_parts[1] == 'starting,': #", "\"There must be a script file or requests for its", "self.properties_file] def startup(self): \"\"\" Should start the tool as fast", "+ \"-kpi.log\") self.reader = DataLogReader(self.kpi_file, self.log) self.reader.report_by_url = self.settings.get(\"report-by-url\", False)", "to in writing, software distributed under the License is distributed", "local_headers = req.headers params = \"[]\" headers = self.__list_to_nvpair_list(iteritems(local_headers)) main_method.append(self.gen_statement(\"request.%s(%r,", "== 'starting,': # self.concurrency += 1 pass elif line_parts[1] ==", "to establish connection\"]]) / 1000.0 bytes_count = int(data_fields[self.idx[\"HTTP response length\"]].strip())", "if duration < 0.001: duration = 0.001 self.log.debug(\"Log reading speed:", "return None, None return data_fields, worker_id def __parse_prev_lines(self, worker_id, lnum,", "def __list_to_nvpair_list(items): return \"[\" + \",\".join(\"NVPair(%r, %r)\" % (header, value)", "= '' # maybe use worker_id somehow? return t_stamp, label,", "self.concurrency -= 1 elif set(line_parts[1:5]) == {'Test', 'name', 'for', 'ID'}:", "self.version = self.VERSION self.mirror_manager = GrinderMirrorsManager(self.http_client, self.log, self.version) def check_if_installed(self):", "lines back. TODO: parameterize? line = self.lines[lineNo].strip() matched = self.DETAILS_REGEX.match(line)", "%s\", self.tool_name, exc) return False def install(self): dest = get_full_path(self.tool_path,", "lnum, line in enumerate(self.lines): if not self.idx: if not line.startswith('data.'):", "None self.cmd_line = None self.process = None self.end_time = None", "is still running - let's stop it. 
\"\"\" shutdown_process(self.process, self.log)", "class GrinderExecutor(ScenarioExecutor, WidgetProvider, FileLister, HavingInstallableTools, SelfDiagnosable): \"\"\" Grinder executor module", "for the specific language governing permissions and limitations under the", "bytes_count = int(data_fields[self.idx[\"HTTP response length\"]].strip()) test_id = data_fields[self.idx[\"Test\"]].strip() thread_id =", "if not line.startswith('data.'): self.__split(line) # to capture early test name", "DELIMITER = \",\" DETAILS_REGEX = re.compile(r\"worker\\.(\\S+) (.+) -> (\\S+) (.+),", "the License. \"\"\" import os import re import time from", "self.kpi_file = None self.cmd_line = None self.process = None self.end_time", "scenario from requests :return: script \"\"\" script = self.engine.create_artifact(\"grinder_requests\", \".py\")", "log \"\"\" DELIMITER = \",\" DETAILS_REGEX = re.compile(r\"worker\\.(\\S+) (.+) ->", "= {} self.partial_buffer = \"\" self.start_time = 0 self.end_time =", "if self.script is not None: label = \"Grinder: %s\" %", "li_search_pattern = re.compile(r'<li id=\".*?\">') li_elements = li_search_pattern.findall(self.page_source) if li_elements: links", "open(self.stdout.name) as fds: contents = fds.read().strip() if contents.strip(): diagnostics.append(\"Grinder STDOUT:\\n\"", "and limitations under the License. 
\"\"\" import os import re", "self.get_script_path() if not self.script: if \"requests\" in scenario: self.script =", "grinder_path = get_full_path(grinder_path) download_link = settings.get(\"download-link\", \"\") super(Grinder, self).__init__(tool_path=grinder_path, download_link=download_link,", "= self.scenario.get_headers() if not self.scenario.get(\"keepalive\", True): headers['Connection'] = 'close' if", "(lnum + 1) / duration) def parse_line(self, data_fields, worker_id, lnum):", "fds): \"\"\" Write bzt properties to fds :param fds: :return:", "1000.0 latency = int(data_fields[self.idx[\"Time to first byte\"]]) / 1000.0 r_code", "builder.save(script) return script def install_required_tools(self): grinder = self._get_tool(Grinder, config=self.settings) self.settings[\"path\"]", "grinder.tool_path self.java_helper = self._get_tool(TaurusJavaHelper) required_tools = [self._get_tool(TclLibrary), self._get_tool(JavaVM), self.java_helper, grinder]", "str(int(time.time())) + \"&use_mirror=autoselect\" def __init__(self, http_client, parent_logger, grinder_version): self.grinder_version =", "None, None worker_id = line[:line.find(' ')] line = line[line.find(' '):]", "= self.call([\"java\", \"-classpath\", self.tool_path, \"net.grinder.Grinder\"]) if err: out += err", "headers: self.root.append(self.gen_statement(\"defaults.setDefaultHeaders([\", indent=0)) for header, value in iteritems(headers): self.root.append(self.gen_statement(\"NVPair(%r, %r),\"", "capture early test name records continue line = line[line.find(' '):]", "= url elif test_id in self.test_names: label = self.test_names[test_id] else:", "def __write_scenario_props(self, fds, scenario): \"\"\" Write scenario props and scenario", "def check_if_installed(self): self.log.debug(\"Trying %s: %s\", self.tool_name, self.tool_path) try: out, err", "fds.write(\"taurus.ramp_up=%s\\n\" % load.ramp_up) fds.write(\"taurus.steps=%s\\n\" % load.steps) fds.write(\"taurus.hold_for=%s\\n\" % load.hold) 
fds.write(\"taurus.iterations=%s\\n\"", "fds): \"\"\" write base properties and base properties file contents", "line = \"%s%s\" % (self.partial_buffer, line) self.partial_buffer = \"\" line", "to read KPI from data log \"\"\" DELIMITER = \",\"", "for key, val in iteritems(local_props): fds.write(\"%s=%s\\n\" % (key, val)) fds.write(\"#", "diagnostics class DataLogReader(ResultsReader): \"\"\" Class to read KPI from data", "local_props: fds.write(\"# Scenario Properies Start\\n\") for key, val in iteritems(local_props):", "sleep_method.append(self.gen_statement(\"if grinder.runNumber != 0: return\")) sleep_method.append(self.gen_statement(\"tprops = grinder.properties.getPropertySubset('taurus.')\")) sleep_method.append(self.gen_statement(\"inc =", "%s\" % r_code else: error_msg = \"Java exception calling TestRunner\"", "self.cmd_line = None self.process = None self.end_time = None self.retcode", "\"\"\" Module holds all stuff regarding Grinder tool usage Copyright", "self.scenario.get_requests(): if not isinstance(req, HTTPRequest): msg = \"Grinder script generator", "None, None line = \"%s%s\" % (self.partial_buffer, line) self.partial_buffer =", "last_pass=last_pass)) lnum = None start = time.time() for lnum, line", "by worker processes (logback-worker.xml) self.env.add_path({\"CLASSPATH\": RESOURCES_DIR}, finish=True) self.env.add_path({\"CLASSPATH\": self.java_helper.tool_path}, finish=True)", "net.grinder.script import Test from net.grinder.script.Grinder import grinder from net.grinder.plugin.http import", "params = \"[]\" headers = self.__list_to_nvpair_list(iteritems(local_headers)) main_method.append(self.gen_statement(\"request.%s(%r, %s, %s)\" %", "%s after installation!\" % self.tool_name) class GrinderMirrorsManager(MirrorsManager): MIRRORS_SOURCE = \"https://sourceforge.net/settings/mirror_choices?projectname=grinder&filename=The%20Grinder\"", "bzt.six import iteritems from bzt.utils import MirrorsManager, dehumanize_time, get_full_path, 
PythonGenerator,", "unzip(grinder_dist, dest, 'grinder-' + self.version) os.remove(grinder_dist) self.log.info(\"Installed grinder successfully\") if", "self.__write_bzt_props(fds) self.kpi_file = os.path.join(self.engine.artifacts_dir, self.exec_id + \"-kpi.log\") self.reader = DataLogReader(self.kpi_file,", "\"\"\" shutdown_process(self.process, self.log) if self.start_time: self.end_time = time.time() self.log.debug(\"Grinder worked", "= line.strip().split(self.DELIMITER) for _ix, field in enumerate(header_list): self.idx[field.strip()] = _ix", "= 0 self.end_time = 0 self.concurrency = 0 self.test_names =", "worked for %s seconds\", self.end_time - self.start_time) def post_process(self): \"\"\"", "to capture early test name records continue line = line[line.find('", "% (header, value) for header, value in items) + \"]\"", "True): headers['Connection'] = 'close' if headers: self.root.append(self.gen_statement(\"defaults.setDefaultHeaders([\", indent=0)) for header,", "fds: contents = fds.read().strip() if contents.strip(): diagnostics.append(\"Grinder STDOUT:\\n\" + contents)", "equal to rampup return self.widget def resource_files(self): resource_files = []", "class DataLogReader(ResultsReader): \"\"\" Class to read KPI from data log", "License for the specific language governing permissions and limitations under", "self).__init__(scenario, parent_logger) self.label = \"BZT Requests\" def build_source_code(self): self.log.debug(\"Generating Python", "props base_props = self.settings.get(\"properties\") if base_props: fds.write(\"# Base Properies Start\\n\")", "bzt.requests_model import HTTPRequest from bzt.six import iteritems from bzt.utils import", "\"\"\" write base properties and base properties file contents to", "time from bzt import TaurusConfigError, ToolError from bzt.engine import ScenarioExecutor,", "File Start: %s\\n\" % base_props_file) with open(base_props_file) as bpf: fds.write(bpf.read())", "if not data_fields[1].strip().isdigit(): return None, 
None if len(data_fields) < max(self.idx.values()):", "main_method.append(self.gen_statement(\"self.rampUpSleeper()\")) for req in self.scenario.get_requests(): if not isinstance(req, HTTPRequest): msg", "grinder.logger.info('No sleep needed')\")) sleep_method.append(self.gen_new_line()) runner_classdef.append(sleep_method) main_method = self.gen_method_definition(\"__call__\", [\"self\"]) main_method.append(self.gen_statement(\"self.rampUpSleeper()\"))", "fds.write(\"%s=%s\\n\" % (key, val)) fds.write(\"# Base Properies End\\n\\n\") def __write_scenario_props(self,", "self.idx: if not line.startswith('data.'): self.__split(line) # to capture early test", "def install(self): dest = get_full_path(self.tool_path, step_up=2) self.log.info(\"Will install %s into", "# base props base_props = self.settings.get(\"properties\") if base_props: fds.write(\"# Base", "dehumanize_time(req.priority_option('think-time')) if think_time: main_method.append(self.gen_statement(\"grinder.sleep(%s)\" % int(think_time * 1000))) runner_classdef.append(main_method) return", "# because we have ramp-down equal to rampup return self.widget", "fds.write(\"grinder.duration=%s\\n\" % int(load.duration * 1000)) fds.write(\"# taurus load values in", "Collect data file artifact \"\"\" if self.kpi_file: self.engine.existing_artifact(self.kpi_file) super(GrinderExecutor, self).post_process()", "DETAILS_REGEX = re.compile(r\"worker\\.(\\S+) (.+) -> (\\S+) (.+), (\\d+) bytes\") def", "links class GrinderScriptBuilder(PythonGenerator): IMPORTS = \"\"\" from net.grinder.script import Test", "is still running. 
Also checks if resulting logs contains any", "to run %s after installation!\" % self.tool_name) class GrinderMirrorsManager(MirrorsManager): MIRRORS_SOURCE", "not None: with open(self.stderr.name) as fds: contents = fds.read().strip() if", "fds.write(\"taurus.concurrency=%s\\n\" % load.concurrency) fds.write(\"taurus.throughput=%s\\n\" % load.throughput) fds.write(\"taurus.ramp_up=%s\\n\" % load.ramp_up) fds.write(\"taurus.steps=%s\\n\"", "__write_bzt_props(self, fds): \"\"\" Write bzt properties to fds :param fds:", "li_elements = li_search_pattern.findall(self.page_source) if li_elements: links = [base_link.format(version=self.grinder_version, mirror=link.strip('<li id=\"').strip('\">'))", "check_if_installed(self): self.log.debug(\"Trying %s: %s\", self.tool_name, self.tool_path) try: out, err =", "test_id in self.test_names: label = self.test_names[test_id] else: label = \"Test", "% load.hold) fds.write(\"taurus.iterations=%s\\n\" % load.iterations) fds.write(\"# BZT Properies End\\n\") def", "% int(global_timeout * 1000), indent=0)) cookie_flag = int(self.scenario.get(\"store-cookie\", True)) self.root.append(self.gen_statement(\"defaults.setUseCookies(%s)\"", "= \"\"\" from net.grinder.script import Test from net.grinder.script.Grinder import grinder", "self.retcode = None self.java_helper = None def __write_base_props(self, fds): \"\"\"", "grinder.logger.info('slept for %sms' % sleep_time)\")) sleep_method.append(self.gen_statement(\"else: grinder.logger.info('No sleep needed')\")) sleep_method.append(self.gen_new_line())", "response code\"]].strip() con_time = int(data_fields[self.idx[\"Time to resolve host\"]]) / 1000.0", "%s lines/s\", (lnum + 1) / duration) def parse_line(self, data_fields,", "latency = int(data_fields[self.idx[\"Time to first byte\"]]) / 1000.0 r_code =", "label = \"Grinder: %s\" % os.path.basename(self.script) else: label = None", "error_msg, source_id, bytes_count def __split(self, line): if not line.endswith(\"\\n\"): 
self.partial_buffer", "line[line.find(' '):] data_fields = line.split(self.DELIMITER) if not data_fields[1].strip().isdigit(): return None,", "exc: self.log.warning(\"%s check failed: %s\", self.tool_name, exc) return False def", "%s\", line.strip()) continue yield self.parse_line(data_fields, worker_id, lnum) if lnum is", "SelfDiagnosable from bzt.modules.aggregator import ConsolidatingAggregator, ResultsReader from bzt.modules.console import WidgetProvider,", "Properies Start\\n\") for key, val in iteritems(local_props): fds.write(\"%s=%s\\n\" % (key,", "True except CALL_PROBLEMS as exc: self.log.warning(\"%s check failed: %s\", self.tool_name,", "%s\", self.tool_name, out) return True except CALL_PROBLEMS as exc: self.log.warning(\"%s", "if headers: self.root.append(self.gen_statement(\"defaults.setDefaultHeaders([\", indent=0)) for header, value in iteritems(headers): self.root.append(self.gen_statement(\"NVPair(%r,", "by Taurus\", indent=0)) self.root.append(self.add_imports()) self.root.append(self.gen_new_line()) default_address = self.scenario.get(\"default-address\") url_arg =", "= None start = time.time() for lnum, line in enumerate(self.lines):", "[\"object\"]) sleep_method = self.gen_method_definition(\"rampUpSleeper\", [\"self\"]) sleep_method.append(self.gen_statement(\"if grinder.runNumber != 0: return\"))", "if thread_id not in self.known_threads: self.known_threads.add(thread_id) self.concurrency += 1 url,", "= dehumanize_time(self.scenario.get(\"timeout\", None)) if global_timeout: self.root.append(self.gen_statement(\"defaults.setTimeout(%s)\" % int(global_timeout * 1000),", "from bzt.modules.java import TaurusJavaHelper from bzt.requests_model import HTTPRequest from bzt.six", "lineNo in reversed(range(max(lnum - 100, 0), lnum)): # looking max", "as exc: self.log.warning(\"%s check failed: %s\", self.tool_name, exc) return False", "test_id, test_name) return None, None worker_id = line[:line.find(' ')] line", "if self.report_by_url: label = url 
elif test_id in self.test_names: label", "self.log.debug(\"Reading grinder results...\") self.lines = list(self.file.get_lines(size=1024 * 1024, last_pass=last_pass)) lnum", "fds.write(\"grinder.script=%s\\n\" % self.script.replace(os.path.sep, \"/\")) fds.write(\"grinder.logDirectory=%s\\n\" % self.engine.artifacts_dir.replace(os.path.sep, \"/\")) load =", "= line[:line.find(' ')] line = line[line.find(' '):] data_fields = line.split(self.DELIMITER)", "get_full_path(self.tool_path, step_up=2) self.log.info(\"Will install %s into %s\", self.tool_name, dest) grinder_dist", "if \"requests\" in scenario: self.script = self.__scenario_from_requests() else: msg =", "Properies Start\\n\") for key, val in iteritems(base_props): fds.write(\"%s=%s\\n\" % (key,", "shutdown_process, TclLibrary, FileReader, RESOURCES_DIR class GrinderExecutor(ScenarioExecutor, WidgetProvider, FileLister, HavingInstallableTools, SelfDiagnosable):", "End\\n\\n\") def __write_scenario_props(self, fds, scenario): \"\"\" Write scenario props and", "download_link = settings.get(\"download-link\", \"\") super(Grinder, self).__init__(tool_path=grinder_path, download_link=download_link, **kwargs) self.version =", "script file or requests for its generation \" msg +=", "cookie_flag, indent=0)) self.root.append(self.gen_new_line()) self.root.append(self.gen_runner_class()) @staticmethod def __list_to_nvpair_list(items): return \"[\" +", "= self.get_scenario() self.exec_id = self.label self.script = self.get_script_path() if not", "self.get_scenario().get(\"properties-file\") if prop_file: resource_files.append(prop_file) return resource_files def get_error_diagnostics(self): diagnostics =", "= self.__split(line) if not data_fields: self.log.debug(\"Skipping line: %s\", line.strip()) continue", "IMPORTS = \"\"\" from net.grinder.script import Test from net.grinder.script.Grinder import", "% r_code else: error_msg = \"Java exception calling TestRunner\" else:", "\"[\" + \",\".join(\"NVPair(%r, %r)\" % 
(header, value) for header, value", "= 0 self.concurrency = 0 self.test_names = {} self.known_threads =", "else: label = None self.widget = ExecutorWidget(self, label) if self.get_load().ramp_up:", "= GrinderMirrorsManager(self.http_client, self.log, self.version) def check_if_installed(self): self.log.debug(\"Trying %s: %s\", self.tool_name,", "%s\\n\\n\" % base_props_file) # base props base_props = self.settings.get(\"properties\") if", "label) if self.get_load().ramp_up: self.widget.duration += self.get_load().ramp_up # because we have", "since Epoch)\"]]) / 1000.0) r_time = int(data_fields[self.idx[\"Test time\"]]) / 1000.0", "DOWNLOAD_LINK = \"https://downloads.sourceforge.net/project/grinder/The%20Grinder%203/{version}\" \\ \"/grinder-{version}-binary.zip?r=&ts=\" + str(int(time.time())) + \"&use_mirror=autoselect\" def", "by applicable law or agreed to in writing, software distributed", "def install_required_tools(self): grinder = self._get_tool(Grinder, config=self.settings) self.settings[\"path\"] = grinder.tool_path self.java_helper", "self.log.debug(\"Trying %s: %s\", self.tool_name, self.tool_path) try: out, err = self.call([\"java\",", "1000.0 con_time += int(data_fields[self.idx[\"Time to establish connection\"]]) / 1000.0 bytes_count", "self.log) if self.start_time: self.end_time = time.time() self.log.debug(\"Grinder worked for %s", "line.split(' ') if len(line_parts) > 1: if line_parts[1] == 'starting,':", "if li_elements: links = [base_link.format(version=self.grinder_version, mirror=link.strip('<li id=\"').strip('\">')) for link in", "\"\"\" DELIMITER = \",\" DETAILS_REGEX = re.compile(r\"worker\\.(\\S+) (.+) -> (\\S+)", "properties to fds :param fds: :return: \"\"\" fds.write(\"# BZT Properies", "self.retcode, self.get_error_diagnostics()) return True return False def shutdown(self): \"\"\" If", "in reversed(range(max(lnum - 100, 0), lnum)): # looking max 100", "grinder_version): self.grinder_version = grinder_version base_link = 
self.MIRRORS_SOURCE.format(version=self.grinder_version) super(GrinderMirrorsManager, self).__init__(http_client, base_link,", "= int(data_fields[self.idx[\"HTTP response length\"]].strip()) test_id = data_fields[self.idx[\"Test\"]].strip() thread_id = worker_id", "None self.retcode = None self.java_helper = None def __write_base_props(self, fds):", "err self.log.debug(\"%s stdout: %s\", self.tool_name, out) return True except CALL_PROBLEMS", "'/' + data_fields[self.idx[\"Thread\"]].strip() if thread_id not in self.known_threads: self.known_threads.add(thread_id) self.concurrency", "back. TODO: parameterize? line = self.lines[lineNo].strip() matched = self.DETAILS_REGEX.match(line) if", "__init__(self, http_client, parent_logger, grinder_version): self.grinder_version = grinder_version base_link = self.MIRRORS_SOURCE.format(version=self.grinder_version)", "Grinder(RequiredTool): # todo: take it from maven and convert to", "early test name records continue line = line[line.find(' '):] header_list", "= line.split(self.DELIMITER) if not data_fields[1].strip().isdigit(): return None, None if len(data_fields)", "links = [base_link.format(version=self.grinder_version, mirror=link.strip('<li id=\"').strip('\">')) for link in li_elements] default_link", "\"https://downloads.sourceforge.net/project/grinder/The%20Grinder%203/{version}\" \\ \"/grinder-{version}-binary.zip?r=&ts=\" + str(int(time.time())) + \"&use_mirror=autoselect\" def __init__(self, http_client,", "elif line_parts[1] == 'finished': if self.concurrency > 0: self.concurrency -=", "matched.group(4) return url, error_msg class Grinder(RequiredTool): # todo: take it", "r_code == matched.group(3) and str(bytes_count) == matched.group(5): return matched.group(2), matched.group(4)", "runner_classdef.append(sleep_method) main_method = self.gen_method_definition(\"__call__\", [\"self\"]) main_method.append(self.gen_statement(\"self.rampUpSleeper()\")) for req in self.scenario.get_requests():", "with the License. 
You may obtain a copy of the", "(ms since Epoch)\"]]) / 1000.0) r_time = int(data_fields[self.idx[\"Test time\"]]) /", "%s\" % os.path.basename(self.script) else: label = None self.widget = ExecutorWidget(self,", "taurus load values in case you need them\\n\") fds.write(\"taurus.concurrency=%s\\n\" %", "not error_msg: if r_code != '0': error_msg = \"HTTP %s\"", "os.remove(grinder_dist) self.log.info(\"Installed grinder successfully\") if not self.check_if_installed(): raise ToolError(\"Unable to", "if worker_id == matched.group(1) and r_code == matched.group(3) and str(bytes_count)", "t_stamp, label, self.concurrency, r_time, con_time, latency, r_code, error_msg, source_id, bytes_count", "= _ix data_fields, worker_id = self.__split(line) if not data_fields: self.log.debug(\"Skipping", "\"Grinder: %s\" % os.path.basename(self.script) else: label = None self.widget =", "\"/\")) load = self.get_load() if load.iterations or load.concurrency: fds.write(\"grinder.runs=%s\\n\" %", "fds.write(\"grinder.hostID=%s\\n\" % self.exec_id) fds.write(\"grinder.script=%s\\n\" % self.script.replace(os.path.sep, \"/\")) fds.write(\"grinder.logDirectory=%s\\n\" % self.engine.artifacts_dir.replace(os.path.sep,", "self.idx[field.strip()] = _ix data_fields, worker_id = self.__split(line) if not data_fields:", "License. \"\"\" import os import re import time from bzt", "exception otherwise. 
:return: bool :raise TaurusToolError: \"\"\" self.retcode = self.process.poll()", "start = time.time() for lnum, line in enumerate(self.lines): if not", "\"-classpath\", self.tool_path, \"net.grinder.Grinder\"]) if err: out += err self.log.debug(\"%s stdout:", "def gen_runner_class(self): runner_classdef = self.gen_class_definition(\"TestRunner\", [\"object\"]) sleep_method = self.gen_method_definition(\"rampUpSleeper\", [\"self\"])", "def __write_base_props(self, fds): \"\"\" write base properties and base properties", "\"https://sourceforge.net/settings/mirror_choices?projectname=grinder&filename=The%20Grinder\" \\ \"%203/{version}/grinder-{version}-binary.zip&dialog=true\" DOWNLOAD_LINK = \"https://downloads.sourceforge.net/project/grinder/The%20Grinder%203/{version}\" \\ \"/grinder-{version}-binary.zip?r=&ts=\" + str(int(time.time()))", "r_code, bytes_count): url = '' error_msg = None for lineNo", "self.get_load() if load.iterations or load.concurrency: fds.write(\"grinder.runs=%s\\n\" % load.iterations or 0)", "self.__split(line) if not data_fields: self.log.debug(\"Skipping line: %s\", line.strip()) continue yield", "int(load.duration * 1000)) fds.write(\"# taurus load values in case you", "links = [] if self.page_source is not None: self.log.debug('Parsing mirrors...')", "val in iteritems(base_props): fds.write(\"%s=%s\\n\" % (key, val)) fds.write(\"# Base Properies", "not line.startswith('data.'): line_parts = line.split(' ') if len(line_parts) > 1:", "bytes_count): url = '' error_msg = None for lineNo in", "open(script_props_file) as spf: fds.write(spf.read()) fds.write(\"# Script Properies File End: %s\\n\\n\"", "None self.exec_id = \"grinder-bzt-%s\" % id(self) self.properties_file = None self.kpi_file", ":param fds: :return: \"\"\" fds.write(\"# BZT Properies Start\\n\") fds.write(\"grinder.hostID=%s\\n\" %", "= HTTPRequest(%s)' % url_arg, indent=0)) self.root.append(self.gen_statement('test = Test(1, \"%s\")' %", "% 
self.engine.artifacts_dir.replace(os.path.sep, \"/\")) load = self.get_load() if load.iterations or load.concurrency:", "super(GrinderExecutor, self).__init__() self.script = None self.exec_id = \"grinder-bzt-%s\" % id(self)", "= set() def _read(self, last_pass=False): \"\"\" Generator method that returns", "GrinderScriptBuilder(self.get_scenario(), self.log) builder.label = self.label builder.build_source_code() builder.save(script) return script def", "self.java_helper = self._get_tool(TaurusJavaHelper) required_tools = [self._get_tool(TclLibrary), self._get_tool(JavaVM), self.java_helper, grinder] for", "fds.write(\"taurus.hold_for=%s\\n\" % load.hold) fds.write(\"taurus.iterations=%s\\n\" % load.iterations) fds.write(\"# BZT Properies End\\n\")", "super(DataLogReader, self).__init__() self.report_by_url = False self.log = parent_logger.getChild(self.__class__.__name__) self.file =", "< max(self.idx.values()): return None, None return data_fields, worker_id def __parse_prev_lines(self,", "r_code != '0': error_msg = \"HTTP %s\" % r_code else:", "None self.end_time = None self.retcode = None self.java_helper = None", "\".properties\") with open(self.properties_file, 'w') as fds: self.__write_base_props(fds) self.__write_scenario_props(fds, scenario) self.__write_bzt_props(fds)", "1: if line_parts[1] == 'starting,': # self.concurrency += 1 pass", "self.mirror_manager = GrinderMirrorsManager(self.http_client, self.log, self.version) def check_if_installed(self): self.log.debug(\"Trying %s: %s\",", "PythonGenerator, CALL_PROBLEMS from bzt.utils import unzip, RequiredTool, JavaVM, shutdown_process, TclLibrary,", "= self._get_tool(TaurusJavaHelper) required_tools = [self._get_tool(TclLibrary), self._get_tool(JavaVM), self.java_helper, grinder] for tool", "if tool is still running. 
Also checks if resulting logs", "\"-binary.zip/download?use_mirror={mirror}\" li_search_pattern = re.compile(r'<li id=\".*?\">') li_elements = li_search_pattern.findall(self.page_source) if li_elements:", "and scenario file props to fds :param fds: :param scenario:", "self.root.append(self.gen_statement(\"defaults.setTimeout(%s)\" % int(global_timeout * 1000), indent=0)) cookie_flag = int(self.scenario.get(\"store-cookie\", True))", "take it from maven and convert to JarTool(?) VERSION =", "fds.write(\"# Scenario Properies End\\n\\n\") def __write_bzt_props(self, fds): \"\"\" Write bzt", "import time from bzt import TaurusConfigError, ToolError from bzt.engine import", "line_parts[1] == 'starting,': # self.concurrency += 1 pass elif line_parts[1]", "val in iteritems(local_props): fds.write(\"%s=%s\\n\" % (key, val)) fds.write(\"# Scenario Properies", "HTTPPluginControl.getHTTPUtilities()\", indent=0)) headers = self.scenario.get_headers() if not self.scenario.get(\"keepalive\", True): headers['Connection']", "self.stdout is not None: with open(self.stdout.name) as fds: contents =", "= None for lineNo in reversed(range(max(lnum - 100, 0), lnum)):", "else \"\" self.root.append(self.gen_statement('request = HTTPRequest(%s)' % url_arg, indent=0)) self.root.append(self.gen_statement('test =", "data :param last_pass: \"\"\" self.log.debug(\"Reading grinder results...\") self.lines = list(self.file.get_lines(size=1024", "License. 
You may obtain a copy of the License at", "artifact \"\"\" if self.kpi_file: self.engine.existing_artifact(self.kpi_file) super(GrinderExecutor, self).post_process() def __scenario_from_requests(self): \"\"\"", "data_fields = line.split(self.DELIMITER) if not data_fields[1].strip().isdigit(): return None, None if", "\"\" self.start_time = 0 self.end_time = 0 self.concurrency = 0", "\"/\")) fds.write(\"grinder.logDirectory=%s\\n\" % self.engine.artifacts_dir.replace(os.path.sep, \"/\")) load = self.get_load() if load.iterations", "_ix data_fields, worker_id = self.__split(line) if not data_fields: self.log.debug(\"Skipping line:", "self.get_script_path() if script_file_path: resource_files.append(script_file_path) prop_file = self.get_scenario().get(\"properties-file\") if prop_file: resource_files.append(prop_file)", "Properies End\\n\\n\") def __write_scenario_props(self, fds, scenario): \"\"\" Write scenario props", "base_link = \"http://sourceforge.net/projects/grinder/files/The%20Grinder%203/{version}/grinder-{version}\" \\ \"-binary.zip/download?use_mirror={mirror}\" li_search_pattern = re.compile(r'<li id=\".*?\">') li_elements", "line.split(self.DELIMITER) if not data_fields[1].strip().isdigit(): return None, None if len(data_fields) <", "{} grinder_path = settings.get(\"path\", self.LOCAL_PATH) grinder_path = get_full_path(grinder_path) download_link =" ]
[ "= '' length = len(comment) for opt, arg in opts:", "getopt import sys comment = ('#' + sys.argv[1]).encode() opts, args", "'cf:o:xy') optstring = '' length = len(comment) for opt, arg", "optstring = optstring + ' ' + opt infile =", "+ ' ' + opt infile = open(args[0], 'rb') outfile", "= open(args[0], 'rb') outfile = open(out, 'wb') outfile.write((optstring + \"\\n\").encode())", "'rb') outfile = open(out, 'wb') outfile.write((optstring + \"\\n\").encode()) for l", "('#' + sys.argv[1]).encode() opts, args = getopt.getopt(sys.argv[2:], 'cf:o:xy') optstring =", "optstring = '' length = len(comment) for opt, arg in", "if opt == '-o': out = arg elif opt not", "length = len(comment) for opt, arg in opts: if opt", "+ sys.argv[1]).encode() opts, args = getopt.getopt(sys.argv[2:], 'cf:o:xy') optstring = ''", "comment = ('#' + sys.argv[1]).encode() opts, args = getopt.getopt(sys.argv[2:], 'cf:o:xy')", "'wb') outfile.write((optstring + \"\\n\").encode()) for l in infile.readlines(): if l[:length]", "optstring + ' ' + opt infile = open(args[0], 'rb')", "elif opt not in ('-f', '-K'): optstring = optstring +", "len(comment) for opt, arg in opts: if opt == '-o':", "' ' + opt infile = open(args[0], 'rb') outfile =", "sys comment = ('#' + sys.argv[1]).encode() opts, args = getopt.getopt(sys.argv[2:],", "infile = open(args[0], 'rb') outfile = open(out, 'wb') outfile.write((optstring +", "for l in infile.readlines(): if l[:length] != comment: outfile.write(l) sys.exit(0)", "= getopt.getopt(sys.argv[2:], 'cf:o:xy') optstring = '' length = len(comment) for", "= open(out, 'wb') outfile.write((optstring + \"\\n\").encode()) for l in infile.readlines():", "arg in opts: if opt == '-o': out = arg", "in opts: if opt == '-o': out = arg elif", "= len(comment) for opt, arg in opts: if opt ==", "('-f', '-K'): optstring = optstring + ' ' + opt", "outfile = open(out, 'wb') outfile.write((optstring + \"\\n\").encode()) for l in", "'' length = len(comment) for opt, arg in opts: if", "arg elif opt 
not in ('-f', '-K'): optstring = optstring", "opt not in ('-f', '-K'): optstring = optstring + '", "not in ('-f', '-K'): optstring = optstring + ' '", "import sys comment = ('#' + sys.argv[1]).encode() opts, args =", "getopt.getopt(sys.argv[2:], 'cf:o:xy') optstring = '' length = len(comment) for opt,", "== '-o': out = arg elif opt not in ('-f',", "opt, arg in opts: if opt == '-o': out =", "= optstring + ' ' + opt infile = open(args[0],", "+ \"\\n\").encode()) for l in infile.readlines(): if l[:length] != comment:", "\"\\n\").encode()) for l in infile.readlines(): if l[:length] != comment: outfile.write(l)", "opts, args = getopt.getopt(sys.argv[2:], 'cf:o:xy') optstring = '' length =", "outfile.write((optstring + \"\\n\").encode()) for l in infile.readlines(): if l[:length] !=", "= ('#' + sys.argv[1]).encode() opts, args = getopt.getopt(sys.argv[2:], 'cf:o:xy') optstring", "import getopt import sys comment = ('#' + sys.argv[1]).encode() opts,", "open(out, 'wb') outfile.write((optstring + \"\\n\").encode()) for l in infile.readlines(): if", "open(args[0], 'rb') outfile = open(out, 'wb') outfile.write((optstring + \"\\n\").encode()) for", "for opt, arg in opts: if opt == '-o': out", "opts: if opt == '-o': out = arg elif opt", "args = getopt.getopt(sys.argv[2:], 'cf:o:xy') optstring = '' length = len(comment)", "opt == '-o': out = arg elif opt not in", "'-o': out = arg elif opt not in ('-f', '-K'):", "= arg elif opt not in ('-f', '-K'): optstring =", "' + opt infile = open(args[0], 'rb') outfile = open(out,", "<reponame>moroten/scons<gh_stars>1000+ import getopt import sys comment = ('#' + sys.argv[1]).encode()", "in ('-f', '-K'): optstring = optstring + ' ' +", "out = arg elif opt not in ('-f', '-K'): optstring", "opt infile = open(args[0], 'rb') outfile = open(out, 'wb') outfile.write((optstring", "'-K'): optstring = optstring + ' ' + opt infile", "+ opt infile = open(args[0], 'rb') outfile = open(out, 'wb')", "sys.argv[1]).encode() opts, args = 
getopt.getopt(sys.argv[2:], 'cf:o:xy') optstring = '' length" ]
[ "\"str_data\": \"<script type='text/javascript'>\" + temp.encode().decode() + \"</script>\" } # self.organized_data.chunks.append(OrganizedChunk(**t))", "in data[\"data\"]: fig_full_path, fig_relative_path = self._build_file(mime_extensions[m], i, chunk_option.fig_caption, chunk_option.name) figs.append(fig_relative_path)", "[] self.organized_data = OrganizedData( global_options = self.executed_data.global_options, chunks = []", "import base64 from pathlib import Path from nbconvert import filters", "None: return False if data[\"output_type\"] == \"display_data\": if \"text/html\" in", "= [\"stream\", \"error\"] if data[\"output_type\"] is None: return False if", ": \"png\", \"image/jpg\" : \"jpg\"} class BaseOrganizer: def __init__(self, executed_data:", "+ \"\\n\" + c.str_data else: t.append(c) self.organized_data.chunks = t @staticmethod", "= data[\"data\"][\"text/plain\"] if \"<table\" in temp: t = {\"type\": \"html_data\",", "markdown_file = os.path.join(self.executed_data.global_options.output_file_dir , markdown_file) # with open(markdown_file, \"w\") as", "fig_name = fig_name + \".\" + extension return os.path.join(self.fig_folder, fig_name),", "} self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def _organize_doc(self): for index,", "None: fig_name = fig_name + \"_\" + name fig_name =", "True return False def _raw_plots(self, data, chunk_option:ChunkOption): if data[\"output_type\"] is", "extension, index, fig_caption= None, name =None): fig_name = \"\" if", "= global_options.input.dir def _create_fig_folder(self): output_folder = self.organized_data.global_options.output.dir Path(output_folder).mkdir(parents=True, exist_ok=True) fig_folder", "here return True else: if ((data[\"data\"][\"text/plain\"][0] == \"'\") or (data[\"data\"][\"text/plain\"][0]", "\"se_data\", \"str_data\": data[\"evalue\"] + filters.strip_ansi(\"\".join(data[\"traceback\"])) } self.organized_data.chunks.append(OrganizedChunk(**t)) return True 
return", "\"str_data\":temp } self.organized_data.chunks.append(OrganizedChunk(**t)) return True return True return False def", "continue present = self._interactive_plots(data) if present: continue present = self._raw_plots(data,", "\"\") if len(d) != 0: return doc else: return None", "doc else: return None # markdown_file = self.executed_data.global_options.input_file_name.split(\".\")[0] + \".md\"", "in (\"sql\"): t = {\"type\": \"sql\", \"str_data\": data['code_text_raw'] } else:", "open(fig_full_path, \"wb\") as f: f.write(bfig) i += 1 return figs", "+ \".\"+ global_options.output.format def _create_output_folder_name(self): global_options = self.organized_data.global_options if global_options.output.dir", "chunk_option) t = {\"type\": \"plot\", \"complex_data\":{\"plots\": plot_infos, \"options\": chunk_option }}", "__init__(self, executed_data: ExecutedData): self.format_started = False self.collected_string = \"\" self.fig_folder", "_coder_string(self, data): list_ = [\"stream\", \"error\"] if data[\"output_type\"] is None:", "= t[-1] else: last_chank = None if last_chank is None:", "data[\"output_type\"] == \"display_data\": plot_infos = self._save_plots(data, chunk_option) t = {\"type\":", "= \"\" self.fig_folder = None self.executed_data = executed_data self.formatted_doc =", "= OrganizedData( global_options = self.executed_data.global_options, chunks = [] ) self._create_output_folder_name()", "not None: if output_type in (\"code\"): t = {\"type\": \"code\",", "_raw_string(self, data): if data[\"output_type\"] is None: return False if data[\"output_type\"]", "is None: t.append(c) else: if (c.type == last_chank.type) & (c.type", "= [] c: OrganizedChunk for c in self.organized_data.chunks: last_chank: OrganizedChunk", "= fig_folder Path(fig_folder).mkdir(parents=True, exist_ok=True) def _parse_raw(self, data, output_type): if data.get(\"code_text_raw\")", "def _organize_doc(self): for index, chunk in enumerate(self.executed_data.chunks): chunk_option = 
chunk.chunk.options", "present = self._interactive_plots(data) if present: continue present = self._raw_plots(data, chunk_option)", "return True # if \"BokehJS\" in temp: # t =", "executed_data self.formatted_doc = [] self.organized_data = OrganizedData( global_options = self.executed_data.global_options,", "data['text'] } self.organized_data.chunks.append(OrganizedChunk(**t)) if data[\"output_type\"] == \"error\": t = {\"type\":", "if present: continue present = self._raw_string(data) if present: continue present", "= {\"type\": \"code\", \"str_data\": data['code_text_raw'] } elif output_type in (\"sql\"):", "_create_output_folder_name(self): global_options = self.organized_data.global_options if global_options.output.dir is None: global_options.output.dir =", "= os.path.join(self.executed_data.global_options.output_file_dir , markdown_file) # with open(markdown_file, \"w\") as f:", "@staticmethod def _clean_up(doc): d = doc.replace(\" \", \"\").replace(\"\\n\", \"\") if", "chunk_option:ChunkOption): if data[\"output_type\"] is None: return False if data[\"output_type\"] ==", "None: t = {\"type\": \"se_data\", \"str_data\": data['text'] } self.organized_data.chunks.append(OrganizedChunk(**t)) if", "None: if self._clean_up(data['code_text_raw']) is not None: if output_type in (\"code\"):", "data, output_type): if data.get(\"code_text_raw\") is not None: if self._clean_up(data['code_text_raw']) is", "data[\"output_type\"] == \"display_data\": if \"text/html\" in data[\"data\"]: print(self.executed_data.global_options.output.format) if self.executed_data.global_options.output.format", "return False if data[\"output_type\"] == \"execute_result\": if data.get(\"data\") is not", "present: continue present = self._coder_string(data) if present: continue present =", "\"markdown\", \"str_data\": data['code_text_raw'] } self.organized_data.chunks.append(OrganizedChunk(**t)) return True else: return False", "self._coder_string(data) if present: continue present = 
self._raw_string(data) if present: continue", "self.executed_data.global_options.output.format != \"html\": raise Exception(\"output format is not HTML\") else:", "== \"error\": t = {\"type\": \"se_data\", \"str_data\": data[\"evalue\"] + filters.strip_ansi(\"\".join(data[\"traceback\"]))", "{\"type\": \"html_data\", \"str_data\": \"<script type='text/javascript'>\" + temp.encode().decode() + \"</script>\" }", "temp: # t = {\"type\": \"html_data\", \"str_data\": \"<script type='text/javascript'>\" +", "= result.data present = self._parse_raw(data, result.output_type) if present: continue present", "\"\").replace(\"\\n\", \"\") if len(d) != 0: return doc else: return", "format is not HTML\") else: t = {\"type\": \"html_data\", \"str_data\":data[\"data\"][\"text/html\"].encode().decode()", "return True return False def _raw_plots(self, data, chunk_option:ChunkOption): if data[\"output_type\"]", "\"str_data\": data['code_text_raw'] } else: t = {\"type\": \"markdown\", \"str_data\": data['code_text_raw']", "chunk_option = chunk.chunk.options if chunk_option.name: print(f\"organizing {chunk_option.name}\") else: print(f\"organizing index", "\"error\": t = {\"type\": \"se_data\", \"str_data\": data[\"evalue\"] + filters.strip_ansi(\"\".join(data[\"traceback\"])) }", "self._build_file(mime_extensions[m], i, chunk_option.fig_caption, chunk_option.name) figs.append(fig_relative_path) bfig = base64.b64decode(data[\"data\"][m]) with open(fig_full_path,", "c in self.organized_data.chunks: last_chank: OrganizedChunk if len(t)> 0: last_chank =", "if fig_caption is not None: fig_name = fig_name + \"_\"", "os.path.join(self.executed_data.global_options.output_file_dir , markdown_file) # with open(markdown_file, \"w\") as f: #", "= fig_name + \"_\" + fig_caption if name is not", "self.organized_data.chunks: last_chank: OrganizedChunk if len(t)> 0: last_chank = t[-1] else:", "{index}\") results = chunk.results for result in results: data =", "return False if data[\"output_type\"] in 
list_: if data[\"output_type\"] == \"stream\":", "== \"stream\": if self._clean_up(data['text']) is not None: t = {\"type\":", "if chunk_option.name: print(f\"organizing {chunk_option.name}\") else: print(f\"organizing index {index}\") results =", "# t = {\"type\": \"html_data\", \"str_data\": \"<script type='text/javascript'>\" + temp.encode().decode()", "last_chank.str_data = last_chank.str_data + \"\\n\" + c.str_data else: t.append(c) self.organized_data.chunks", "\"str_data\": data['text'] } self.organized_data.chunks.append(OrganizedChunk(**t)) if data[\"output_type\"] == \"error\": t =", "else: t.append(c) self.organized_data.chunks = t @staticmethod def _clean_up(doc): d =", "if data.get(\"data\") is not None: if 'matplotlib' in data[\"data\"][\"text/plain\"]: #", "((data[\"data\"][\"text/plain\"][0] == \"'\") or (data[\"data\"][\"text/plain\"][0] == '\"')): temp = data[\"data\"][\"text/plain\"][1:-1]", "bfig = base64.b64decode(data[\"data\"][m]) with open(fig_full_path, \"wb\") as f: f.write(bfig) i", "else: return None # markdown_file = self.executed_data.global_options.input_file_name.split(\".\")[0] + \".md\" #", "continue present = self._raw_plots(data, chunk_option) if present: continue print(\"not supported", "fig_caption= None, name =None): fig_name = \"\" if fig_caption is", "pathlib import Path from nbconvert import filters from pygments.formatters.latex import", "+ str(index) fig_name = fig_name + \".\" + extension return", "data[\"output_type\"] == \"error\": t = {\"type\": \"se_data\", \"str_data\": data[\"evalue\"] +", "HTML\") else: t = {\"type\": \"html_data\", \"str_data\":data[\"data\"][\"text/html\"].encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t)) return", "= t @staticmethod def _clean_up(doc): d = doc.replace(\" \", \"\").replace(\"\\n\",", "= self._save_plots(data, chunk_option) t = {\"type\": \"plot\", \"complex_data\":{\"plots\": plot_infos, \"options\":", "HTMLFormatter mime_extensions = {\"image/png\" : 
\"png\", \"image/jpg\" : \"jpg\"} class", "last_chank: OrganizedChunk if len(t)> 0: last_chank = t[-1] else: last_chank", "False def _raw_plots(self, data, chunk_option:ChunkOption): if data[\"output_type\"] is None: return", "+ \".md\" # markdown_file = os.path.join(self.executed_data.global_options.output_file_dir , markdown_file) # with", "self.organized_data.chunks.append(OrganizedChunk(**t)) return True else: return False def _coder_string(self, data): list_", "\"plot\", \"complex_data\":{\"plots\": plot_infos, \"options\": chunk_option }} self.organized_data.chunks.append(OrganizedChunk(**t)) return True return", "BaseOrganizer: def __init__(self, executed_data: ExecutedData): self.format_started = False self.collected_string =", "fig_name = \"\" if fig_caption is not None: fig_name =", "fig_name) def _interactive_plots(self, data): if data[\"output_type\"] is None: return False", "print(f\"organizing index {index}\") results = chunk.results for result in results:", "def _raw_plots(self, data, chunk_option:ChunkOption): if data[\"output_type\"] is None: return False", "\".\"+ global_options.output.format def _create_output_folder_name(self): global_options = self.organized_data.global_options if global_options.output.dir is", "self.format_started = False self.collected_string = \"\" self.fig_folder = None self.executed_data", "return False def _raw_string(self, data): if data[\"output_type\"] is None: return", "else: if (c.type == last_chank.type) & (c.type != \"plot\"): last_chank.str_data", "is None: return False if data[\"output_type\"] == \"display_data\": plot_infos =", "doc.replace(\" \", \"\").replace(\"\\n\", \"\") if len(d) != 0: return doc", "_interactive_plots(self, data): if data[\"output_type\"] is None: return False if data[\"output_type\"]", "\"BokehJS\" in temp: # t = {\"type\": \"html_data\", \"str_data\": \"<script", "t = {\"type\": \"sql\", \"str_data\": data['code_text_raw'] } else: t =", "if output_type in (\"code\"): t = {\"type\": 
\"code\", \"str_data\": data['code_text_raw']", "{chunk_option.name}\") else: print(f\"organizing index {index}\") results = chunk.results for result", "not None: if self._clean_up(data['code_text_raw']) is not None: if output_type in", "in mime_extensions: if m in data[\"data\"]: fig_full_path, fig_relative_path = self._build_file(mime_extensions[m],", "fig_name + \"_\" + fig_caption if name is not None:", "} self.organized_data.chunks.append(OrganizedChunk(**t)) return True return True return False def _raw_plots(self,", "present: continue present = self._raw_plots(data, chunk_option) if present: continue print(\"not", "is not HTML\") else: t = {\"type\": \"html_data\", \"str_data\":data[\"data\"][\"text/html\"].encode().decode() }", "if present: continue present = self._raw_plots(data, chunk_option) if present: continue", "return True return False def _raw_string(self, data): if data[\"output_type\"] is", "\"str_data\": data['code_text_raw'] } elif output_type in (\"sql\"): t = {\"type\":", "if data[\"output_type\"] == \"display_data\": plot_infos = self._save_plots(data, chunk_option) t =", "if data[\"output_type\"] == \"display_data\": if \"text/html\" in data[\"data\"]: print(self.executed_data.global_options.output.format) if", "+= 1 return figs def _build_file(self, extension, index, fig_caption= None,", "chunk.results for result in results: data = result.data present =", "False if data[\"output_type\"] == \"display_data\": if \"text/html\" in data[\"data\"]: print(self.executed_data.global_options.output.format)", "else: temp = data[\"data\"][\"text/plain\"] if \"<table\" in temp: t =", "mime_extensions: if m in data[\"data\"]: fig_full_path, fig_relative_path = self._build_file(mime_extensions[m], i,", "return False def _organize_doc(self): for index, chunk in enumerate(self.executed_data.chunks): chunk_option", "+ name fig_name = fig_name + \"_\" + str(index) fig_name", "fig_name + \".\" + extension return os.path.join(self.fig_folder, fig_name), 
os.path.join(self.fig_folder, fig_name)", ": \"jpg\"} class BaseOrganizer: def __init__(self, executed_data: ExecutedData): self.format_started =", "(\"sql\"): t = {\"type\": \"sql\", \"str_data\": data['code_text_raw'] } else: t", "else: t = {\"type\": \"markdown\", \"str_data\": data['code_text_raw'] } self.organized_data.chunks.append(OrganizedChunk(**t)) return", "+ filters.strip_ansi(\"\".join(data[\"traceback\"])) } self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def _raw_string(self,", "c: OrganizedChunk for c in self.organized_data.chunks: last_chank: OrganizedChunk if len(t)>", "results = chunk.results for result in results: data = result.data", "mime_extensions = {\"image/png\" : \"png\", \"image/jpg\" : \"jpg\"} class BaseOrganizer:", "from zen_knit.formattor.html_formatter import HTMLFormatter mime_extensions = {\"image/png\" : \"png\", \"image/jpg\"", "= self.executed_data.global_options, chunks = [] ) self._create_output_folder_name() self._create_fig_folder() self._organize_doc() self._create_output_file_name()", "\"html_data\", \"str_data\": \"<script type='text/javascript'>\" + temp.encode().decode() + \"</script>\" } #", "self.organized_data.global_options if global_options.output.dir is None: global_options.output.dir = global_options.input.dir def _create_fig_folder(self):", "def _build_file(self, extension, index, fig_caption= None, name =None): fig_name =", "} self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def _raw_string(self, data): if", "chunk_option }} self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def _save_plots(self, data,", "zen_knit.formattor.html_formatter import HTMLFormatter mime_extensions = {\"image/png\" : \"png\", \"image/jpg\" :", "is None: return False if data[\"output_type\"] in list_: if data[\"output_type\"]", "{\"type\": \"e_data\", \"str_data\":temp } self.organized_data.chunks.append(OrganizedChunk(**t)) return True return 
True return", "[] ) self._create_output_folder_name() self._create_fig_folder() self._organize_doc() self._create_output_file_name() def _create_output_file_name(self): global_options =", "data[\"output_type\"] == \"stream\": if self._clean_up(data['text']) is not None: t =", "self._create_fig_folder() self._organize_doc() self._create_output_file_name() def _create_output_file_name(self): global_options = self.organized_data.global_options global_options.output.file_name =", "t.append(c) self.organized_data.chunks = t @staticmethod def _clean_up(doc): d = doc.replace(\"", "return False if data[\"output_type\"] == \"display_data\": if \"text/html\" in data[\"data\"]:", "= {\"type\": \"html_data\", \"str_data\": \"<script type='text/javascript'>\" + temp.encode().decode() + \"</script>\"", "from zen_knit.data_types import ChunkOption, ExecutedData, OrganizedChunk, OrganizedData from zen_knit.formattor.html_formatter import", "with open(fig_full_path, \"wb\") as f: f.write(bfig) i += 1 return", "c.str_data else: t.append(c) self.organized_data.chunks = t @staticmethod def _clean_up(doc): d", "f.write(bfig) i += 1 return figs def _build_file(self, extension, index,", "data['code_text_raw'] } self.organized_data.chunks.append(OrganizedChunk(**t)) return True else: return False def _coder_string(self,", "self._raw_string(data) if present: continue present = self._interactive_plots(data) if present: continue", "def _create_output_file_name(self): global_options = self.organized_data.global_options global_options.output.file_name = global_options.input.file_name.split(\".\")[0] + \".\"+", "\"e_data\", \"str_data\":temp } self.organized_data.chunks.append(OrganizedChunk(**t)) return True return True return False", "True return True return False def _raw_plots(self, data, chunk_option:ChunkOption): if", "\"display_data\": if \"text/html\" in data[\"data\"]: print(self.executed_data.global_options.output.format) if self.executed_data.global_options.output.format != 
\"html\":", "is not None: if output_type in (\"code\"): t = {\"type\":", "self.organized_data.chunks = t @staticmethod def _clean_up(doc): d = doc.replace(\" \",", "return True return False def _organize_doc(self): for index, chunk in", "in self.organized_data.chunks: last_chank: OrganizedChunk if len(t)> 0: last_chank = t[-1]", "== last_chank.type) & (c.type != \"plot\"): last_chank.str_data = last_chank.str_data +", "\"str_data\": data['code_text_raw'] } self.organized_data.chunks.append(OrganizedChunk(**t)) return True else: return False def", "None: if 'matplotlib' in data[\"data\"][\"text/plain\"]: # Doing nothing here return", "\"html\": raise Exception(\"output format is not HTML\") else: t =", "== \"display_data\": plot_infos = self._save_plots(data, chunk_option) t = {\"type\": \"plot\",", "for index, chunk in enumerate(self.executed_data.chunks): chunk_option = chunk.chunk.options if chunk_option.name:", "\"image/jpg\" : \"jpg\"} class BaseOrganizer: def __init__(self, executed_data: ExecutedData): self.format_started", "def _interactive_plots(self, data): if data[\"output_type\"] is None: return False if", "None if last_chank is None: t.append(c) else: if (c.type ==", "self.organized_data.chunks.append(OrganizedChunk(**t)) return True # if \"BokehJS\" in temp: # t", "if present: continue present = self._interactive_plots(data) if present: continue present", "data) t = [] c: OrganizedChunk for c in self.organized_data.chunks:", "data[\"output_type\"] == \"execute_result\": if data.get(\"data\") is not None: if 'matplotlib'", "markdown_file = self.executed_data.global_options.input_file_name.split(\".\")[0] + \".md\" # markdown_file = os.path.join(self.executed_data.global_options.output_file_dir ,", "True if self._clean_up(temp) is not None: t = {\"type\": \"e_data\",", "i += 1 return figs def _build_file(self, extension, index, fig_caption=", "data[\"output_type\"] in list_: if data[\"output_type\"] == \"stream\": if self._clean_up(data['text']) is", 
"def __init__(self, executed_data: ExecutedData): self.format_started = False self.collected_string = \"\"", "self._parse_raw(data, result.output_type) if present: continue present = self._coder_string(data) if present:", "t = {\"type\": \"html_data\", \"str_data\":temp.encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t)) return True #", "zen_knit.data_types import ChunkOption, ExecutedData, OrganizedChunk, OrganizedData from zen_knit.formattor.html_formatter import HTMLFormatter", "= None self.executed_data = executed_data self.formatted_doc = [] self.organized_data =", "self.fig_folder = fig_folder Path(fig_folder).mkdir(parents=True, exist_ok=True) def _parse_raw(self, data, output_type): if", "return figs def _build_file(self, extension, index, fig_caption= None, name =None):", "fig_name = fig_name + \"_\" + str(index) fig_name = fig_name", "if data[\"output_type\"] is None: return False if data[\"output_type\"] in list_:", "= executed_data self.formatted_doc = [] self.organized_data = OrganizedData( global_options =", "t = {\"type\": \"markdown\", \"str_data\": data['code_text_raw'] } self.organized_data.chunks.append(OrganizedChunk(**t)) return True", "OrganizedData( global_options = self.executed_data.global_options, chunks = [] ) self._create_output_folder_name() self._create_fig_folder()", "global_options.output.format def _create_output_folder_name(self): global_options = self.organized_data.global_options if global_options.output.dir is None:", "# if \"BokehJS\" in temp: # t = {\"type\": \"html_data\",", "= fig_name + \"_\" + str(index) fig_name = fig_name +", "temp: t = {\"type\": \"html_data\", \"str_data\":temp.encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t)) return True", "f: f.write(bfig) i += 1 return figs def _build_file(self, extension,", "= self._interactive_plots(data) if present: continue present = self._raw_plots(data, chunk_option) if", "self.executed_data.global_options, chunks = [] ) 
self._create_output_folder_name() self._create_fig_folder() self._organize_doc() self._create_output_file_name() def", "\"execute_result\": if data.get(\"data\") is not None: if 'matplotlib' in data[\"data\"][\"text/plain\"]:", "import ChunkOption, ExecutedData, OrganizedChunk, OrganizedData from zen_knit.formattor.html_formatter import HTMLFormatter mime_extensions", "\"html_data\", \"str_data\":temp.encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t)) return True # if \"BokehJS\" in", "nothing here return True else: if ((data[\"data\"][\"text/plain\"][0] == \"'\") or", "io import os import base64 from pathlib import Path from", "is None: return False if data[\"output_type\"] == \"display_data\": if \"text/html\"", "{\"type\": \"plot\", \"complex_data\":{\"plots\": plot_infos, \"options\": chunk_option }} self.organized_data.chunks.append(OrganizedChunk(**t)) return True", "False self.collected_string = \"\" self.fig_folder = None self.executed_data = executed_data", "is not None: fig_name = fig_name + \"_\" + name", "= {\"type\": \"html_data\", \"str_data\":data[\"data\"][\"text/html\"].encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False", "ChunkOption, ExecutedData, OrganizedChunk, OrganizedData from zen_knit.formattor.html_formatter import HTMLFormatter mime_extensions =", "if self._clean_up(data['text']) is not None: t = {\"type\": \"se_data\", \"str_data\":", "with open(markdown_file, \"w\") as f: # text = \"\\n\".join(self.formatted_doc) #", "self._interactive_plots(data) if present: continue present = self._raw_plots(data, chunk_option) if present:", "data[\"data\"][\"text/plain\"]: # Doing nothing here return True else: if ((data[\"data\"][\"text/plain\"][0]", "Exception(\"output format is not HTML\") else: t = {\"type\": \"html_data\",", "if \"text/html\" in data[\"data\"]: print(self.executed_data.global_options.output.format) if self.executed_data.global_options.output.format != 
\"html\": raise", "False def _raw_string(self, data): if data[\"output_type\"] is None: return False", "exist_ok=True) fig_folder = os.path.join(output_folder, self.organized_data.global_options.output.fig_dir) self.fig_folder = fig_folder Path(fig_folder).mkdir(parents=True, exist_ok=True)", "= data[\"data\"][\"text/plain\"][1:-1] else: temp = data[\"data\"][\"text/plain\"] if \"<table\" in temp:", "t = {\"type\": \"se_data\", \"str_data\": data['text'] } self.organized_data.chunks.append(OrganizedChunk(**t)) if data[\"output_type\"]", "\"options\": chunk_option }} self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def _save_plots(self,", "present = self._raw_string(data) if present: continue present = self._interactive_plots(data) if", "return True else: return False def _coder_string(self, data): list_ =", "Doing nothing here return True else: if ((data[\"data\"][\"text/plain\"][0] == \"'\")", "name =None): fig_name = \"\" if fig_caption is not None:", "!= \"plot\"): last_chank.str_data = last_chank.str_data + \"\\n\" + c.str_data else:", "OrganizedChunk if len(t)> 0: last_chank = t[-1] else: last_chank =", "= [] i = 1 for m in mime_extensions: if", "[] i = 1 for m in mime_extensions: if m", "if data[\"output_type\"] is None: return False if data[\"output_type\"] == \"execute_result\":", "= self.organized_data.global_options.output.dir Path(output_folder).mkdir(parents=True, exist_ok=True) fig_folder = os.path.join(output_folder, self.organized_data.global_options.output.fig_dir) self.fig_folder =", "ExecutedData, OrganizedChunk, OrganizedData from zen_knit.formattor.html_formatter import HTMLFormatter mime_extensions = {\"image/png\"", "global_options.input.file_name.split(\".\")[0] + \".\"+ global_options.output.format def _create_output_folder_name(self): global_options = self.organized_data.global_options if", "format\", data) t = [] c: OrganizedChunk for c in", "OrganizedChunk, OrganizedData from 
zen_knit.formattor.html_formatter import HTMLFormatter mime_extensions = {\"image/png\" :", "\"<script type='text/javascript'>\" + temp.encode().decode() + \"</script>\" } # self.organized_data.chunks.append(OrganizedChunk(**t)) #", "Path(output_folder).mkdir(parents=True, exist_ok=True) fig_folder = os.path.join(output_folder, self.organized_data.global_options.output.fig_dir) self.fig_folder = fig_folder Path(fig_folder).mkdir(parents=True,", "= last_chank.str_data + \"\\n\" + c.str_data else: t.append(c) self.organized_data.chunks =", "import LatexFormatter from zen_knit import formattor from zen_knit.data_types import ChunkOption,", "index {index}\") results = chunk.results for result in results: data", "data[\"output_type\"] is None: return False if data[\"output_type\"] == \"display_data\": plot_infos", "chunk_option:ChunkOption): figs = [] i = 1 for m in", "= {\"type\": \"markdown\", \"str_data\": data['code_text_raw'] } self.organized_data.chunks.append(OrganizedChunk(**t)) return True else:", "return True return True return False def _raw_plots(self, data, chunk_option:ChunkOption):", "is None: return False if data[\"output_type\"] == \"execute_result\": if data.get(\"data\")", "if data.get(\"code_text_raw\") is not None: if self._clean_up(data['code_text_raw']) is not None:", "\"'\") or (data[\"data\"][\"text/plain\"][0] == '\"')): temp = data[\"data\"][\"text/plain\"][1:-1] else: temp", "data['code_text_raw'] } elif output_type in (\"sql\"): t = {\"type\": \"sql\",", "print(self.executed_data.global_options.output.format) if self.executed_data.global_options.output.format != \"html\": raise Exception(\"output format is not", "!= 0: return doc else: return None # markdown_file =", "if \"BokehJS\" in temp: # t = {\"type\": \"html_data\", \"str_data\":", "[] c: OrganizedChunk for c in self.organized_data.chunks: last_chank: OrganizedChunk if", "t[-1] else: last_chank = None if last_chank is None: t.append(c)", "last_chank = None if last_chank is None: 
t.append(c) else: if", "\"text/html\" in data[\"data\"]: print(self.executed_data.global_options.output.format) if self.executed_data.global_options.output.format != \"html\": raise Exception(\"output", "if (c.type == last_chank.type) & (c.type != \"plot\"): last_chank.str_data =", "not None: fig_name = fig_name + \"_\" + name fig_name", "True # if \"BokehJS\" in temp: # t = {\"type\":", "\"\" if fig_caption is not None: fig_name = fig_name +", "\"sql\", \"str_data\": data['code_text_raw'] } else: t = {\"type\": \"markdown\", \"str_data\":", "self._create_output_folder_name() self._create_fig_folder() self._organize_doc() self._create_output_file_name() def _create_output_file_name(self): global_options = self.organized_data.global_options global_options.output.file_name", "_parse_raw(self, data, output_type): if data.get(\"code_text_raw\") is not None: if self._clean_up(data['code_text_raw'])", "data[\"output_type\"] is None: return False if data[\"output_type\"] == \"execute_result\": if", "'\"')): temp = data[\"data\"][\"text/plain\"][1:-1] else: temp = data[\"data\"][\"text/plain\"] if \"<table\"", "\"_\" + name fig_name = fig_name + \"_\" + str(index)", "if data[\"output_type\"] == \"stream\": if self._clean_up(data['text']) is not None: t", "t = {\"type\": \"html_data\", \"str_data\": \"<script type='text/javascript'>\" + temp.encode().decode() +", "self.organized_data.chunks.append(OrganizedChunk(**t)) # return True if self._clean_up(temp) is not None: t", "\"str_data\": data[\"evalue\"] + filters.strip_ansi(\"\".join(data[\"traceback\"])) } self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False", "= base64.b64decode(data[\"data\"][m]) with open(fig_full_path, \"wb\") as f: f.write(bfig) i +=", "= {\"image/png\" : \"png\", \"image/jpg\" : \"jpg\"} class BaseOrganizer: def", "return True return False def _save_plots(self, data, chunk_option:ChunkOption): figs =", "(\"code\"): t = {\"type\": \"code\", \"str_data\": 
data['code_text_raw'] } elif output_type", "(c.type != \"plot\"): last_chank.str_data = last_chank.str_data + \"\\n\" + c.str_data", "is not None: t = {\"type\": \"e_data\", \"str_data\":temp } self.organized_data.chunks.append(OrganizedChunk(**t))", "else: if ((data[\"data\"][\"text/plain\"][0] == \"'\") or (data[\"data\"][\"text/plain\"][0] == '\"')): temp", "type='text/javascript'>\" + temp.encode().decode() + \"</script>\" } # self.organized_data.chunks.append(OrganizedChunk(**t)) # return", "self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def _save_plots(self, data, chunk_option:ChunkOption): figs", "= fig_name + \"_\" + name fig_name = fig_name +", "output_folder = self.organized_data.global_options.output.dir Path(output_folder).mkdir(parents=True, exist_ok=True) fig_folder = os.path.join(output_folder, self.organized_data.global_options.output.fig_dir) self.fig_folder", "\"code\", \"str_data\": data['code_text_raw'] } elif output_type in (\"sql\"): t =", "m in mime_extensions: if m in data[\"data\"]: fig_full_path, fig_relative_path =", "plot_infos = self._save_plots(data, chunk_option) t = {\"type\": \"plot\", \"complex_data\":{\"plots\": plot_infos,", "=None): fig_name = \"\" if fig_caption is not None: fig_name", "for c in self.organized_data.chunks: last_chank: OrganizedChunk if len(t)> 0: last_chank", "\".\" + extension return os.path.join(self.fig_folder, fig_name), os.path.join(self.fig_folder, fig_name) def _interactive_plots(self,", "data): if data[\"output_type\"] is None: return False if data[\"output_type\"] ==", "result.output_type) if present: continue present = self._coder_string(data) if present: continue", "return None # markdown_file = self.executed_data.global_options.input_file_name.split(\".\")[0] + \".md\" # markdown_file", "if self._clean_up(data['code_text_raw']) is not None: if output_type in (\"code\"): t", "if 'matplotlib' in data[\"data\"][\"text/plain\"]: # Doing nothing here return True", "= 
doc.replace(\" \", \"\").replace(\"\\n\", \"\") if len(d) != 0: return", "t = {\"type\": \"code\", \"str_data\": data['code_text_raw'] } elif output_type in", "base64.b64decode(data[\"data\"][m]) with open(fig_full_path, \"wb\") as f: f.write(bfig) i += 1", "in enumerate(self.executed_data.chunks): chunk_option = chunk.chunk.options if chunk_option.name: print(f\"organizing {chunk_option.name}\") else:", "= chunk.results for result in results: data = result.data present", "data[\"evalue\"] + filters.strip_ansi(\"\".join(data[\"traceback\"])) } self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def", "is not None: t = {\"type\": \"se_data\", \"str_data\": data['text'] }", "None: return False if data[\"output_type\"] in list_: if data[\"output_type\"] ==", "0: return doc else: return None # markdown_file = self.executed_data.global_options.input_file_name.split(\".\")[0]", "continue present = self._raw_string(data) if present: continue present = self._interactive_plots(data)", "in (\"code\"): t = {\"type\": \"code\", \"str_data\": data['code_text_raw'] } elif", "raise Exception(\"output format is not HTML\") else: t = {\"type\":", "str(index) fig_name = fig_name + \".\" + extension return os.path.join(self.fig_folder,", "name fig_name = fig_name + \"_\" + str(index) fig_name =", "nbconvert import filters from pygments.formatters.latex import LatexFormatter from zen_knit import", "not None: if 'matplotlib' in data[\"data\"][\"text/plain\"]: # Doing nothing here", "= self.organized_data.global_options global_options.output.file_name = global_options.input.file_name.split(\".\")[0] + \".\"+ global_options.output.format def _create_output_folder_name(self):", "enumerate(self.executed_data.chunks): chunk_option = chunk.chunk.options if chunk_option.name: print(f\"organizing {chunk_option.name}\") else: print(f\"organizing", "data): list_ = [\"stream\", \"error\"] if data[\"output_type\"] is None: return", "{\"type\": \"se_data\", \"str_data\": 
data[\"evalue\"] + filters.strip_ansi(\"\".join(data[\"traceback\"])) } self.organized_data.chunks.append(OrganizedChunk(**t)) return True", "continue print(\"not supported format\", data) t = [] c: OrganizedChunk", "as f: f.write(bfig) i += 1 return figs def _build_file(self,", "\"_\" + fig_caption if name is not None: fig_name =", "False def _save_plots(self, data, chunk_option:ChunkOption): figs = [] i =", "}} self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def _save_plots(self, data, chunk_option:ChunkOption):", "_save_plots(self, data, chunk_option:ChunkOption): figs = [] i = 1 for", "# markdown_file = os.path.join(self.executed_data.global_options.output_file_dir , markdown_file) # with open(markdown_file, \"w\")", "t = [] c: OrganizedChunk for c in self.organized_data.chunks: last_chank:", "return True if self._clean_up(temp) is not None: t = {\"type\":", "= self.organized_data.global_options if global_options.output.dir is None: global_options.output.dir = global_options.input.dir def", "output_type): if data.get(\"code_text_raw\") is not None: if self._clean_up(data['code_text_raw']) is not", "+ \"_\" + str(index) fig_name = fig_name + \".\" +", "not None: t = {\"type\": \"e_data\", \"str_data\":temp } self.organized_data.chunks.append(OrganizedChunk(**t)) return", "+ \".\" + extension return os.path.join(self.fig_folder, fig_name), os.path.join(self.fig_folder, fig_name) def", "True else: if ((data[\"data\"][\"text/plain\"][0] == \"'\") or (data[\"data\"][\"text/plain\"][0] == '\"')):", "= {\"type\": \"html_data\", \"str_data\":temp.encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t)) return True # if", "= chunk.chunk.options if chunk_option.name: print(f\"organizing {chunk_option.name}\") else: print(f\"organizing index {index}\")", "temp.encode().decode() + \"</script>\" } # self.organized_data.chunks.append(OrganizedChunk(**t)) # return True if", "import io import os import base64 from pathlib 
import Path", "} self.organized_data.chunks.append(OrganizedChunk(**t)) return True else: return False def _coder_string(self, data):", "figs.append(fig_relative_path) bfig = base64.b64decode(data[\"data\"][m]) with open(fig_full_path, \"wb\") as f: f.write(bfig)", "fig_name = fig_name + \"_\" + name fig_name = fig_name", "def _raw_string(self, data): if data[\"output_type\"] is None: return False if", "pygments.formatters.latex import LatexFormatter from zen_knit import formattor from zen_knit.data_types import", "\"\\n\" + c.str_data else: t.append(c) self.organized_data.chunks = t @staticmethod def", "import os import base64 from pathlib import Path from nbconvert", "= {\"type\": \"plot\", \"complex_data\":{\"plots\": plot_infos, \"options\": chunk_option }} self.organized_data.chunks.append(OrganizedChunk(**t)) return", "self.fig_folder = None self.executed_data = executed_data self.formatted_doc = [] self.organized_data", "name is not None: fig_name = fig_name + \"_\" +", "return False def _raw_plots(self, data, chunk_option:ChunkOption): if data[\"output_type\"] is None:", "= {\"type\": \"sql\", \"str_data\": data['code_text_raw'] } else: t = {\"type\":", "_clean_up(doc): d = doc.replace(\" \", \"\").replace(\"\\n\", \"\") if len(d) !=", "global_options.output.file_name = global_options.input.file_name.split(\".\")[0] + \".\"+ global_options.output.format def _create_output_folder_name(self): global_options =", "if global_options.output.dir is None: global_options.output.dir = global_options.input.dir def _create_fig_folder(self): output_folder", "zen_knit import formattor from zen_knit.data_types import ChunkOption, ExecutedData, OrganizedChunk, OrganizedData", "self._organize_doc() self._create_output_file_name() def _create_output_file_name(self): global_options = self.organized_data.global_options global_options.output.file_name = global_options.input.file_name.split(\".\")[0]", "OrganizedData from zen_knit.formattor.html_formatter import HTMLFormatter 
mime_extensions = {\"image/png\" : \"png\",", "data[\"data\"]: print(self.executed_data.global_options.output.format) if self.executed_data.global_options.output.format != \"html\": raise Exception(\"output format is", "def _clean_up(doc): d = doc.replace(\" \", \"\").replace(\"\\n\", \"\") if len(d)", "formattor from zen_knit.data_types import ChunkOption, ExecutedData, OrganizedChunk, OrganizedData from zen_knit.formattor.html_formatter", "temp = data[\"data\"][\"text/plain\"] if \"<table\" in temp: t = {\"type\":", "1 return figs def _build_file(self, extension, index, fig_caption= None, name", "else: print(f\"organizing index {index}\") results = chunk.results for result in", "if \"<table\" in temp: t = {\"type\": \"html_data\", \"str_data\":temp.encode().decode() }", "self._raw_plots(data, chunk_option) if present: continue print(\"not supported format\", data) t", "in data[\"data\"]: print(self.executed_data.global_options.output.format) if self.executed_data.global_options.output.format != \"html\": raise Exception(\"output format", "self.executed_data.global_options.input_file_name.split(\".\")[0] + \".md\" # markdown_file = os.path.join(self.executed_data.global_options.output_file_dir , markdown_file) #", "+ c.str_data else: t.append(c) self.organized_data.chunks = t @staticmethod def _clean_up(doc):", "or (data[\"data\"][\"text/plain\"][0] == '\"')): temp = data[\"data\"][\"text/plain\"][1:-1] else: temp =", "chunks = [] ) self._create_output_folder_name() self._create_fig_folder() self._organize_doc() self._create_output_file_name() def _create_output_file_name(self):", "supported format\", data) t = [] c: OrganizedChunk for c", "chunk_option) if present: continue print(\"not supported format\", data) t =", "fig_full_path, fig_relative_path = self._build_file(mime_extensions[m], i, chunk_option.fig_caption, chunk_option.name) figs.append(fig_relative_path) bfig =", "False if data[\"output_type\"] == \"execute_result\": if data.get(\"data\") is not 
None:", "from pathlib import Path from nbconvert import filters from pygments.formatters.latex", "not None: t = {\"type\": \"se_data\", \"str_data\": data['text'] } self.organized_data.chunks.append(OrganizedChunk(**t))", "# Doing nothing here return True else: if ((data[\"data\"][\"text/plain\"][0] ==", "temp = data[\"data\"][\"text/plain\"][1:-1] else: temp = data[\"data\"][\"text/plain\"] if \"<table\" in", "chunk_option.name: print(f\"organizing {chunk_option.name}\") else: print(f\"organizing index {index}\") results = chunk.results", "} else: t = {\"type\": \"markdown\", \"str_data\": data['code_text_raw'] } self.organized_data.chunks.append(OrganizedChunk(**t))", "= 1 for m in mime_extensions: if m in data[\"data\"]:", "= [] self.organized_data = OrganizedData( global_options = self.executed_data.global_options, chunks =", "None: fig_name = fig_name + \"_\" + fig_caption if name", "last_chank.str_data + \"\\n\" + c.str_data else: t.append(c) self.organized_data.chunks = t", "data[\"data\"][\"text/plain\"][1:-1] else: temp = data[\"data\"][\"text/plain\"] if \"<table\" in temp: t", "_build_file(self, extension, index, fig_caption= None, name =None): fig_name = \"\"", "\"complex_data\":{\"plots\": plot_infos, \"options\": chunk_option }} self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False", "= self.executed_data.global_options.input_file_name.split(\".\")[0] + \".md\" # markdown_file = os.path.join(self.executed_data.global_options.output_file_dir , markdown_file)", "self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def _raw_string(self, data): if data[\"output_type\"]", "if data[\"output_type\"] == \"error\": t = {\"type\": \"se_data\", \"str_data\": data[\"evalue\"]", "is not None: if self._clean_up(data['code_text_raw']) is not None: if output_type", "is None: global_options.output.dir = global_options.input.dir def _create_fig_folder(self): output_folder = 
self.organized_data.global_options.output.dir", "return False def _coder_string(self, data): list_ = [\"stream\", \"error\"] if", "fig_caption if name is not None: fig_name = fig_name +", "def _coder_string(self, data): list_ = [\"stream\", \"error\"] if data[\"output_type\"] is", "is not None: fig_name = fig_name + \"_\" + fig_caption", "if present: continue present = self._coder_string(data) if present: continue present", "present: continue print(\"not supported format\", data) t = [] c:", "= [] ) self._create_output_folder_name() self._create_fig_folder() self._organize_doc() self._create_output_file_name() def _create_output_file_name(self): global_options", "if data[\"output_type\"] == \"execute_result\": if data.get(\"data\") is not None: if", "return True else: if ((data[\"data\"][\"text/plain\"][0] == \"'\") or (data[\"data\"][\"text/plain\"][0] ==", "chunk.chunk.options if chunk_option.name: print(f\"organizing {chunk_option.name}\") else: print(f\"organizing index {index}\") results", "= global_options.input.file_name.split(\".\")[0] + \".\"+ global_options.output.format def _create_output_folder_name(self): global_options = self.organized_data.global_options", "continue present = self._coder_string(data) if present: continue present = self._raw_string(data)", "True return False def _organize_doc(self): for index, chunk in enumerate(self.executed_data.chunks):", "present = self._raw_plots(data, chunk_option) if present: continue print(\"not supported format\",", "if len(t)> 0: last_chank = t[-1] else: last_chank = None", "t = {\"type\": \"se_data\", \"str_data\": data[\"evalue\"] + filters.strip_ansi(\"\".join(data[\"traceback\"])) } self.organized_data.chunks.append(OrganizedChunk(**t))", "plot_infos, \"options\": chunk_option }} self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def", "in list_: if data[\"output_type\"] == \"stream\": if self._clean_up(data['text']) is not", "(data[\"data\"][\"text/plain\"][0] == '\"')): 
temp = data[\"data\"][\"text/plain\"][1:-1] else: temp = data[\"data\"][\"text/plain\"]", "m in data[\"data\"]: fig_full_path, fig_relative_path = self._build_file(mime_extensions[m], i, chunk_option.fig_caption, chunk_option.name)", "== \"'\") or (data[\"data\"][\"text/plain\"][0] == '\"')): temp = data[\"data\"][\"text/plain\"][1:-1] else:", "if data[\"output_type\"] is None: return False if data[\"output_type\"] == \"display_data\":", "self.formatted_doc = [] self.organized_data = OrganizedData( global_options = self.executed_data.global_options, chunks", "LatexFormatter from zen_knit import formattor from zen_knit.data_types import ChunkOption, ExecutedData,", "fig_name), os.path.join(self.fig_folder, fig_name) def _interactive_plots(self, data): if data[\"output_type\"] is None:", "# self.organized_data.chunks.append(OrganizedChunk(**t)) # return True if self._clean_up(temp) is not None:", "index, fig_caption= None, name =None): fig_name = \"\" if fig_caption", "i = 1 for m in mime_extensions: if m in", "t = {\"type\": \"plot\", \"complex_data\":{\"plots\": plot_infos, \"options\": chunk_option }} self.organized_data.chunks.append(OrganizedChunk(**t))", "from pygments.formatters.latex import LatexFormatter from zen_knit import formattor from zen_knit.data_types", "\".md\" # markdown_file = os.path.join(self.executed_data.global_options.output_file_dir , markdown_file) # with open(markdown_file,", "= None if last_chank is None: t.append(c) else: if (c.type", "fig_name + \"_\" + str(index) fig_name = fig_name + \".\"", "output_type in (\"sql\"): t = {\"type\": \"sql\", \"str_data\": data['code_text_raw'] }", "{\"type\": \"code\", \"str_data\": data['code_text_raw'] } elif output_type in (\"sql\"): t", "\"</script>\" } # self.organized_data.chunks.append(OrganizedChunk(**t)) # return True if self._clean_up(temp) is", "fig_relative_path = self._build_file(mime_extensions[m], i, chunk_option.fig_caption, chunk_option.name) figs.append(fig_relative_path) bfig = 
base64.b64decode(data[\"data\"][m])", "is not None: if 'matplotlib' in data[\"data\"][\"text/plain\"]: # Doing nothing", "= \"\" if fig_caption is not None: fig_name = fig_name", "def _parse_raw(self, data, output_type): if data.get(\"code_text_raw\") is not None: if", "global_options.input.dir def _create_fig_folder(self): output_folder = self.organized_data.global_options.output.dir Path(output_folder).mkdir(parents=True, exist_ok=True) fig_folder =", "markdown_file) # with open(markdown_file, \"w\") as f: # text =", "# return True if self._clean_up(temp) is not None: t =", "global_options.output.dir = global_options.input.dir def _create_fig_folder(self): output_folder = self.organized_data.global_options.output.dir Path(output_folder).mkdir(parents=True, exist_ok=True)", "True else: return False def _coder_string(self, data): list_ = [\"stream\",", "else: return False def _coder_string(self, data): list_ = [\"stream\", \"error\"]", "os.path.join(self.fig_folder, fig_name), os.path.join(self.fig_folder, fig_name) def _interactive_plots(self, data): if data[\"output_type\"] is", "for m in mime_extensions: if m in data[\"data\"]: fig_full_path, fig_relative_path", "None, name =None): fig_name = \"\" if fig_caption is not", "None self.executed_data = executed_data self.formatted_doc = [] self.organized_data = OrganizedData(", "print(f\"organizing {chunk_option.name}\") else: print(f\"organizing index {index}\") results = chunk.results for", "{\"type\": \"html_data\", \"str_data\":temp.encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t)) return True # if \"BokehJS\"", "self._clean_up(data['text']) is not None: t = {\"type\": \"se_data\", \"str_data\": data['text']", "if self._clean_up(temp) is not None: t = {\"type\": \"e_data\", \"str_data\":temp", "\"_\" + str(index) fig_name = fig_name + \".\" + extension", "figs def _build_file(self, extension, index, fig_caption= None, name =None): fig_name", "data.get(\"code_text_raw\") is not None: if 
self._clean_up(data['code_text_raw']) is not None: if", "global_options = self.organized_data.global_options if global_options.output.dir is None: global_options.output.dir = global_options.input.dir", "class BaseOrganizer: def __init__(self, executed_data: ExecutedData): self.format_started = False self.collected_string", "None: return False if data[\"output_type\"] == \"display_data\": plot_infos = self._save_plots(data,", "executed_data: ExecutedData): self.format_started = False self.collected_string = \"\" self.fig_folder =", "\"wb\") as f: f.write(bfig) i += 1 return figs def", "in data[\"data\"][\"text/plain\"]: # Doing nothing here return True else: if", "= self._parse_raw(data, result.output_type) if present: continue present = self._coder_string(data) if", "ExecutedData): self.format_started = False self.collected_string = \"\" self.fig_folder = None", "None: t.append(c) else: if (c.type == last_chank.type) & (c.type !=", "{\"type\": \"se_data\", \"str_data\": data['text'] } self.organized_data.chunks.append(OrganizedChunk(**t)) if data[\"output_type\"] == \"error\":", "'matplotlib' in data[\"data\"][\"text/plain\"]: # Doing nothing here return True else:", "data[\"data\"][\"text/plain\"] if \"<table\" in temp: t = {\"type\": \"html_data\", \"str_data\":temp.encode().decode()", "import HTMLFormatter mime_extensions = {\"image/png\" : \"png\", \"image/jpg\" : \"jpg\"}", "present: continue present = self._interactive_plots(data) if present: continue present =", "False def _organize_doc(self): for index, chunk in enumerate(self.executed_data.chunks): chunk_option =", "\"jpg\"} class BaseOrganizer: def __init__(self, executed_data: ExecutedData): self.format_started = False", "self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def _organize_doc(self): for index, chunk", "= self._raw_string(data) if present: continue present = self._interactive_plots(data) if present:", "t @staticmethod def _clean_up(doc): d = doc.replace(\" \", 
\"\").replace(\"\\n\", \"\")", "None: return False if data[\"output_type\"] == \"execute_result\": if data.get(\"data\") is", "self._clean_up(temp) is not None: t = {\"type\": \"e_data\", \"str_data\":temp }", "if last_chank is None: t.append(c) else: if (c.type == last_chank.type)", "self.organized_data.global_options.output.dir Path(output_folder).mkdir(parents=True, exist_ok=True) fig_folder = os.path.join(output_folder, self.organized_data.global_options.output.fig_dir) self.fig_folder = fig_folder", "if data[\"output_type\"] in list_: if data[\"output_type\"] == \"stream\": if self._clean_up(data['text'])", "data, chunk_option:ChunkOption): figs = [] i = 1 for m", "t = {\"type\": \"html_data\", \"str_data\":data[\"data\"][\"text/html\"].encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t)) return True return", "len(d) != 0: return doc else: return None # markdown_file", "base64 from pathlib import Path from nbconvert import filters from", "True return False def _raw_string(self, data): if data[\"output_type\"] is None:", "None: t = {\"type\": \"e_data\", \"str_data\":temp } self.organized_data.chunks.append(OrganizedChunk(**t)) return True", "+ extension return os.path.join(self.fig_folder, fig_name), os.path.join(self.fig_folder, fig_name) def _interactive_plots(self, data):", "\"stream\": if self._clean_up(data['text']) is not None: t = {\"type\": \"se_data\",", "data[\"data\"]: fig_full_path, fig_relative_path = self._build_file(mime_extensions[m], i, chunk_option.fig_caption, chunk_option.name) figs.append(fig_relative_path) bfig", ", markdown_file) # with open(markdown_file, \"w\") as f: # text", "} self.organized_data.chunks.append(OrganizedChunk(**t)) return True # if \"BokehJS\" in temp: #", "figs = [] i = 1 for m in mime_extensions:", "self.organized_data.global_options global_options.output.file_name = global_options.input.file_name.split(\".\")[0] + \".\"+ global_options.output.format def _create_output_folder_name(self): 
global_options", "import filters from pygments.formatters.latex import LatexFormatter from zen_knit import formattor", "fig_folder = os.path.join(output_folder, self.organized_data.global_options.output.fig_dir) self.fig_folder = fig_folder Path(fig_folder).mkdir(parents=True, exist_ok=True) def", "data, chunk_option:ChunkOption): if data[\"output_type\"] is None: return False if data[\"output_type\"]", "self._clean_up(data['code_text_raw']) is not None: if output_type in (\"code\"): t =", "{\"type\": \"sql\", \"str_data\": data['code_text_raw'] } else: t = {\"type\": \"markdown\",", "!= \"html\": raise Exception(\"output format is not HTML\") else: t", "_raw_plots(self, data, chunk_option:ChunkOption): if data[\"output_type\"] is None: return False if", "not HTML\") else: t = {\"type\": \"html_data\", \"str_data\":data[\"data\"][\"text/html\"].encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t))", "elif output_type in (\"sql\"): t = {\"type\": \"sql\", \"str_data\": data['code_text_raw']", "False def _coder_string(self, data): list_ = [\"stream\", \"error\"] if data[\"output_type\"]", "None: if output_type in (\"code\"): t = {\"type\": \"code\", \"str_data\":", "{\"image/png\" : \"png\", \"image/jpg\" : \"jpg\"} class BaseOrganizer: def __init__(self,", "= {\"type\": \"se_data\", \"str_data\": data[\"evalue\"] + filters.strip_ansi(\"\".join(data[\"traceback\"])) } self.organized_data.chunks.append(OrganizedChunk(**t)) return", "d = doc.replace(\" \", \"\").replace(\"\\n\", \"\") if len(d) != 0:", "global_options.output.dir is None: global_options.output.dir = global_options.input.dir def _create_fig_folder(self): output_folder =", "from nbconvert import filters from pygments.formatters.latex import LatexFormatter from zen_knit", "Path(fig_folder).mkdir(parents=True, exist_ok=True) def _parse_raw(self, data, output_type): if data.get(\"code_text_raw\") is not", "False if data[\"output_type\"] in list_: if data[\"output_type\"] == \"stream\": 
if", "if m in data[\"data\"]: fig_full_path, fig_relative_path = self._build_file(mime_extensions[m], i, chunk_option.fig_caption,", "def _create_output_folder_name(self): global_options = self.organized_data.global_options if global_options.output.dir is None: global_options.output.dir", "data[\"output_type\"] is None: return False if data[\"output_type\"] in list_: if", "= self._build_file(mime_extensions[m], i, chunk_option.fig_caption, chunk_option.name) figs.append(fig_relative_path) bfig = base64.b64decode(data[\"data\"][m]) with", "return doc else: return None # markdown_file = self.executed_data.global_options.input_file_name.split(\".\")[0] +", "chunk_option.fig_caption, chunk_option.name) figs.append(fig_relative_path) bfig = base64.b64decode(data[\"data\"][m]) with open(fig_full_path, \"wb\") as", "True return False def _save_plots(self, data, chunk_option:ChunkOption): figs = []", "result in results: data = result.data present = self._parse_raw(data, result.output_type)", "data.get(\"data\") is not None: if 'matplotlib' in data[\"data\"][\"text/plain\"]: # Doing", "0: last_chank = t[-1] else: last_chank = None if last_chank", "result.data present = self._parse_raw(data, result.output_type) if present: continue present =", "# markdown_file = self.executed_data.global_options.input_file_name.split(\".\")[0] + \".md\" # markdown_file = os.path.join(self.executed_data.global_options.output_file_dir", "os.path.join(self.fig_folder, fig_name) def _interactive_plots(self, data): if data[\"output_type\"] is None: return", "= self._raw_plots(data, chunk_option) if present: continue print(\"not supported format\", data)", "+ \"</script>\" } # self.organized_data.chunks.append(OrganizedChunk(**t)) # return True if self._clean_up(temp)", "else: t = {\"type\": \"html_data\", \"str_data\":data[\"data\"][\"text/html\"].encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t)) return True", "data = result.data present = self._parse_raw(data, 
result.output_type) if present: continue", "from zen_knit import formattor from zen_knit.data_types import ChunkOption, ExecutedData, OrganizedChunk,", "+ temp.encode().decode() + \"</script>\" } # self.organized_data.chunks.append(OrganizedChunk(**t)) # return True", "= self._coder_string(data) if present: continue present = self._raw_string(data) if present:", "in temp: t = {\"type\": \"html_data\", \"str_data\":temp.encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t)) return", "\"error\"] if data[\"output_type\"] is None: return False if data[\"output_type\"] in", "def _create_fig_folder(self): output_folder = self.organized_data.global_options.output.dir Path(output_folder).mkdir(parents=True, exist_ok=True) fig_folder = os.path.join(output_folder,", "OrganizedChunk for c in self.organized_data.chunks: last_chank: OrganizedChunk if len(t)> 0:", "# with open(markdown_file, \"w\") as f: # text = \"\\n\".join(self.formatted_doc)", "for result in results: data = result.data present = self._parse_raw(data,", "\"str_data\":data[\"data\"][\"text/html\"].encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def _organize_doc(self): for", "list_ = [\"stream\", \"error\"] if data[\"output_type\"] is None: return False", "t.append(c) else: if (c.type == last_chank.type) & (c.type != \"plot\"):", "open(markdown_file, \"w\") as f: # text = \"\\n\".join(self.formatted_doc) # f.write(text)", "\"\" self.fig_folder = None self.executed_data = executed_data self.formatted_doc = []", "if present: continue print(\"not supported format\", data) t = []", "in temp: # t = {\"type\": \"html_data\", \"str_data\": \"<script type='text/javascript'>\"", "present = self._parse_raw(data, result.output_type) if present: continue present = self._coder_string(data)", "else: last_chank = None if last_chank is None: t.append(c) else:", "== '\"')): temp = data[\"data\"][\"text/plain\"][1:-1] else: temp = 
data[\"data\"][\"text/plain\"] if", "chunk_option.name) figs.append(fig_relative_path) bfig = base64.b64decode(data[\"data\"][m]) with open(fig_full_path, \"wb\") as f:", "+ fig_caption if name is not None: fig_name = fig_name", "results: data = result.data present = self._parse_raw(data, result.output_type) if present:", "if name is not None: fig_name = fig_name + \"_\"", "global_options = self.executed_data.global_options, chunks = [] ) self._create_output_folder_name() self._create_fig_folder() self._organize_doc()", "fig_caption is not None: fig_name = fig_name + \"_\" +", "self.organized_data.chunks.append(OrganizedChunk(**t)) return True return True return False def _raw_plots(self, data,", "if ((data[\"data\"][\"text/plain\"][0] == \"'\") or (data[\"data\"][\"text/plain\"][0] == '\"')): temp =", "print(\"not supported format\", data) t = [] c: OrganizedChunk for", "} # self.organized_data.chunks.append(OrganizedChunk(**t)) # return True if self._clean_up(temp) is not", "present: continue present = self._raw_string(data) if present: continue present =", "} self.organized_data.chunks.append(OrganizedChunk(**t)) if data[\"output_type\"] == \"error\": t = {\"type\": \"se_data\",", "_create_output_file_name(self): global_options = self.organized_data.global_options global_options.output.file_name = global_options.input.file_name.split(\".\")[0] + \".\"+ global_options.output.format", "index, chunk in enumerate(self.executed_data.chunks): chunk_option = chunk.chunk.options if chunk_option.name: print(f\"organizing", "last_chank.type) & (c.type != \"plot\"): last_chank.str_data = last_chank.str_data + \"\\n\"", ") self._create_output_folder_name() self._create_fig_folder() self._organize_doc() self._create_output_file_name() def _create_output_file_name(self): global_options = self.organized_data.global_options", "+ \"_\" + name fig_name = fig_name + \"_\" +", "def _save_plots(self, data, chunk_option:ChunkOption): figs = [] i = 1", "exist_ok=True) def 
_parse_raw(self, data, output_type): if data.get(\"code_text_raw\") is not None:", "return False def _save_plots(self, data, chunk_option:ChunkOption): figs = [] i", "self.organized_data.global_options.output.fig_dir) self.fig_folder = fig_folder Path(fig_folder).mkdir(parents=True, exist_ok=True) def _parse_raw(self, data, output_type):", "= os.path.join(output_folder, self.organized_data.global_options.output.fig_dir) self.fig_folder = fig_folder Path(fig_folder).mkdir(parents=True, exist_ok=True) def _parse_raw(self,", "\"png\", \"image/jpg\" : \"jpg\"} class BaseOrganizer: def __init__(self, executed_data: ExecutedData):", "self.collected_string = \"\" self.fig_folder = None self.executed_data = executed_data self.formatted_doc", "\"str_data\":temp.encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t)) return True # if \"BokehJS\" in temp:", "\", \"\").replace(\"\\n\", \"\") if len(d) != 0: return doc else:", "return os.path.join(self.fig_folder, fig_name), os.path.join(self.fig_folder, fig_name) def _interactive_plots(self, data): if data[\"output_type\"]", "= fig_name + \".\" + extension return os.path.join(self.fig_folder, fig_name), os.path.join(self.fig_folder,", "data['code_text_raw'] } else: t = {\"type\": \"markdown\", \"str_data\": data['code_text_raw'] }", "[\"stream\", \"error\"] if data[\"output_type\"] is None: return False if data[\"output_type\"]", "self.organized_data.chunks.append(OrganizedChunk(**t)) if data[\"output_type\"] == \"error\": t = {\"type\": \"se_data\", \"str_data\":", "data[\"output_type\"] is None: return False if data[\"output_type\"] == \"display_data\": if", "} elif output_type in (\"sql\"): t = {\"type\": \"sql\", \"str_data\":", "_organize_doc(self): for index, chunk in enumerate(self.executed_data.chunks): chunk_option = chunk.chunk.options if", "in results: data = result.data present = self._parse_raw(data, result.output_type) if", "t = {\"type\": \"e_data\", \"str_data\":temp } 
self.organized_data.chunks.append(OrganizedChunk(**t)) return True return", "list_: if data[\"output_type\"] == \"stream\": if self._clean_up(data['text']) is not None:", "present = self._coder_string(data) if present: continue present = self._raw_string(data) if", "self._create_output_file_name() def _create_output_file_name(self): global_options = self.organized_data.global_options global_options.output.file_name = global_options.input.file_name.split(\".\")[0] +", "filters.strip_ansi(\"\".join(data[\"traceback\"])) } self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def _raw_string(self, data):", "self.organized_data = OrganizedData( global_options = self.executed_data.global_options, chunks = [] )", "chunk in enumerate(self.executed_data.chunks): chunk_option = chunk.chunk.options if chunk_option.name: print(f\"organizing {chunk_option.name}\")", "os.path.join(output_folder, self.organized_data.global_options.output.fig_dir) self.fig_folder = fig_folder Path(fig_folder).mkdir(parents=True, exist_ok=True) def _parse_raw(self, data,", "import Path from nbconvert import filters from pygments.formatters.latex import LatexFormatter", "Path from nbconvert import filters from pygments.formatters.latex import LatexFormatter from", "= {\"type\": \"se_data\", \"str_data\": data['text'] } self.organized_data.chunks.append(OrganizedChunk(**t)) if data[\"output_type\"] ==", "not None: fig_name = fig_name + \"_\" + fig_caption if", "self._save_plots(data, chunk_option) t = {\"type\": \"plot\", \"complex_data\":{\"plots\": plot_infos, \"options\": chunk_option", "fig_name = fig_name + \"_\" + fig_caption if name is", "fig_name + \"_\" + name fig_name = fig_name + \"_\"", "== \"display_data\": if \"text/html\" in data[\"data\"]: print(self.executed_data.global_options.output.format) if self.executed_data.global_options.output.format !=", "{\"type\": \"html_data\", \"str_data\":data[\"data\"][\"text/html\"].encode().decode() } 
self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def", "& (c.type != \"plot\"): last_chank.str_data = last_chank.str_data + \"\\n\" +", "i, chunk_option.fig_caption, chunk_option.name) figs.append(fig_relative_path) bfig = base64.b64decode(data[\"data\"][m]) with open(fig_full_path, \"wb\")", "(c.type == last_chank.type) & (c.type != \"plot\"): last_chank.str_data = last_chank.str_data", "= False self.collected_string = \"\" self.fig_folder = None self.executed_data =", "filters from pygments.formatters.latex import LatexFormatter from zen_knit import formattor from", "\"plot\"): last_chank.str_data = last_chank.str_data + \"\\n\" + c.str_data else: t.append(c)", "\"se_data\", \"str_data\": data['text'] } self.organized_data.chunks.append(OrganizedChunk(**t)) if data[\"output_type\"] == \"error\": t", "global_options = self.organized_data.global_options global_options.output.file_name = global_options.input.file_name.split(\".\")[0] + \".\"+ global_options.output.format def", "if self.executed_data.global_options.output.format != \"html\": raise Exception(\"output format is not HTML\")", "False if data[\"output_type\"] == \"display_data\": plot_infos = self._save_plots(data, chunk_option) t", "extension return os.path.join(self.fig_folder, fig_name), os.path.join(self.fig_folder, fig_name) def _interactive_plots(self, data): if", "fig_folder Path(fig_folder).mkdir(parents=True, exist_ok=True) def _parse_raw(self, data, output_type): if data.get(\"code_text_raw\") is", "{\"type\": \"markdown\", \"str_data\": data['code_text_raw'] } self.organized_data.chunks.append(OrganizedChunk(**t)) return True else: return", "== \"execute_result\": if data.get(\"data\") is not None: if 'matplotlib' in", "\"<table\" in temp: t = {\"type\": \"html_data\", \"str_data\":temp.encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t))", "+ \"_\" + fig_caption if name is not None: fig_name", "self.executed_data = executed_data 
self.formatted_doc = [] self.organized_data = OrganizedData( global_options", "1 for m in mime_extensions: if m in data[\"data\"]: fig_full_path,", "import formattor from zen_knit.data_types import ChunkOption, ExecutedData, OrganizedChunk, OrganizedData from", "\"display_data\": plot_infos = self._save_plots(data, chunk_option) t = {\"type\": \"plot\", \"complex_data\":{\"plots\":", "last_chank is None: t.append(c) else: if (c.type == last_chank.type) &", "len(t)> 0: last_chank = t[-1] else: last_chank = None if", "= {\"type\": \"e_data\", \"str_data\":temp } self.organized_data.chunks.append(OrganizedChunk(**t)) return True return True", "None: global_options.output.dir = global_options.input.dir def _create_fig_folder(self): output_folder = self.organized_data.global_options.output.dir Path(output_folder).mkdir(parents=True,", "\"html_data\", \"str_data\":data[\"data\"][\"text/html\"].encode().decode() } self.organized_data.chunks.append(OrganizedChunk(**t)) return True return False def _organize_doc(self):", "last_chank = t[-1] else: last_chank = None if last_chank is", "<gh_stars>10-100 import io import os import base64 from pathlib import", "return False if data[\"output_type\"] == \"display_data\": plot_infos = self._save_plots(data, chunk_option)", "output_type in (\"code\"): t = {\"type\": \"code\", \"str_data\": data['code_text_raw'] }", "os import base64 from pathlib import Path from nbconvert import", "if len(d) != 0: return doc else: return None #", "_create_fig_folder(self): output_folder = self.organized_data.global_options.output.dir Path(output_folder).mkdir(parents=True, exist_ok=True) fig_folder = os.path.join(output_folder, self.organized_data.global_options.output.fig_dir)", "None # markdown_file = self.executed_data.global_options.input_file_name.split(\".\")[0] + \".md\" # markdown_file =" ]
[ "(CameraRgb or CameraDepth) object, the active camera of the robot.", "self_colliding - Boolean, if True at least one of the", "for each joint, has to be strictly superior to 0", "- List of floats containing the joint's positions \"\"\" joint_positions", "isSelfColliding(self, link_names): \"\"\" Specifies if a link is colliding with", "joint_names, joint_values, percentage_speeds): joint_speed =\\ self.joint_dict[joint_name].getMaxVelocity() *\\ percentage_speed pybullet.setJointMotorControl2( self.robot_model,", "resolution of the camera \"\"\" try: self.active_camera = self.camera_dict[camera_id] self.active_camera.subscribe(resolution=resolution)", "model to which the module is associated. Returns: robot_model -", "in the setAngles parameters\") for joint_name, joint_value, percentage_speed in zip(", "- List of string containing the names of the joints", "quaternion, useFixedBase=False, globalScaling=1.0, physicsClientId=self.physics_client, flags=pybullet.URDF_USE_SELF_COLLISION | pybullet.URDF_USE_MATERIAL_COLORS_FROM_MTL) except pybullet.error as", "i in range(pybullet.getNumJoints( self.robot_model, physicsClientId=self.physics_client)): if IS_VERSION_PYTHON_3: # PYTHON 3", "return self.active_camera.getCameraLink() except KeyError: raise pybullet.error(\"No active camera, cannot retrieve", "self.description_file, translation, quaternion, useFixedBase=False, globalScaling=1.0, physicsClientId=self.physics_client, flags=pybullet.URDF_USE_SELF_COLLISION | pybullet.URDF_USE_MATERIAL_COLORS_FROM_MTL) except", "the rest of the virtual robot. Parameters: link_names - String", "of the robot's base on the y axis in meters", "\"\"\" Returns the id of the simulated instance in which", "object, the active camera of the robot. 
If there is", "len(joint_values) ==\\ len(percentage_speeds) assert all( speed >= 0.0 and speed", "is str: assert link_names in self.link_dict.keys() names = [link_names] else:", "joint, has to be strictly superior to 0 and inferior", "is found, nothing is unsubscribed assert self.active_camera is not None", "AssertionError: pass def getCameraFrame(self): \"\"\" Returns a camera frame. Be", "import pybullet from qibullet.camera import * from qibullet.link import Link", "None return self.active_camera.getFrame() except AssertionError: raise pybullet.error(\"No active camera, cannot", "unavailable\") def getCameraLink(self): \"\"\" Returns the link of the active", "z axis in meters \"\"\" position, quaternions = pybullet.getBasePositionAndOrientation( self.robot_model,", "print(\"This camera does not exist, use a valid camera id\")", "the joints Returns: joint_velocities - List of floats containing the", "self.robot_model, physicsClientId=self.physics_client)): if IS_VERSION_PYTHON_3: # PYTHON 3 version needs a", "in self.link_dict.keys() names = [link_names] else: assert set(link_names).issubset(self.link_dict.keys()) names =", "The position of the robot's base on the x axis,", "version needs a conversion bytes to str joint_info = pybullet.getJointInfo(", "of the robot. Returns: active_camera - Camera (CameraRgb or CameraDepth)", "no active camera is found, nothing is unsubscribed assert self.active_camera", "string containing the names of the joints Returns: joint_velocities -", "camera_id - The id of the camera to be subscribed", "supposed to be loaded Returns: boolean - True if the", "description_file - The file giving the description of the virtual", "in radians to be applied percentage_speeds - Percentages of the", "cannot retrieve any link\") def getActiveCamera(self): \"\"\" Returns the active", "of the robot in the WORLD frame quaternion - List", "the camera id. 
Parameters: camera_id - The id of the", "getCameraLink(self): \"\"\" Returns the link of the active camera. Be", "the one holding the camera id. Parameters: camera_id - The", "\"\"\" Returns the pybullet model to which the module is", "quaternion - List containing 4 elements, the quaternion [x, y,", "- String or list of string containing the names of", "self.joint_dict[joint_info[1].decode('utf-8')] =\\ Joint(joint_info) else: # PYTHON 2 Version joint_info =", "1.0 for speed in percentage_speeds) except AssertionError: raise pybullet.error(\"Error in", "id of the camera to be unsubscribed \"\"\" try: #", "camera frame. Be advised that the subscribeCamera method needs to", "\"\"\" try: assert self.active_camera is not None return self.active_camera.getFrame() except", "else: assert set(link_names).issubset(self.link_dict.keys()) names = list(link_names) for name in names:", "resolution unavailable\") def getCameraLink(self): \"\"\" Returns the link of the", "ran correctly, False otherwise \"\"\" try: self.physics_client = physicsClientId self.robot_model", "links descriptions. The joints are set to 0 rad. Parameters:", "returned \"\"\" return self.active_camera def getPosition(self): \"\"\" Gets the position", "bodyA=self.robot_model, bodyB=self.robot_model, linkIndexB=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) if len(contact_tuple) != 0: return True", "a camera, the one holding the camera id. 
Parameters: camera_id", "import Link from qibullet.joint import Joint IS_VERSION_PYTHON_3 = sys.version_info[0] >=", "to be controlled joint_values - List of values corresponding to", "camera frame as a formatted numpy array, directly exploitable from", "import sys import pybullet from qibullet.camera import * from qibullet.link", "except pybullet.error as e: raise pybullet.error(\"Cannot load robot model: \"", "Parameters: joint_names - List of string containing the name of", "resolution=Camera.K_QVGA): \"\"\" Subscribe to the camera holding the camera id.", "does not exist, use a valid camera id\") except AssertionError:", "robot_model - The pybullet model of the robot \"\"\" return", "with corresponding meshes should be used, otherwise the link cannot", "bodyB=self.robot_model, linkIndexB=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) if len(contact_tuple) != 0: return True return", "\"\"\" return self.robot_model def getPhysicsClientId(self): \"\"\" Returns the id of", "for speed in percentage_speeds) except AssertionError: raise pybullet.error(\"Error in the", "position[1], theta def isSelfColliding(self, link_names): \"\"\" Specifies if a link", "self.robot_model def getPhysicsClientId(self): \"\"\" Returns the id of the simulated", "frame. Be advised that the subscribeCamera method needs to be", "position of the robot's joints in radians. If one of", "the validity of the input parameters. Parameters: joint_names - List", "be used, otherwise the link cannot self collide Returns: self_colliding", "=\\ Joint(joint_info) else: # PYTHON 2 Version joint_info = pybullet.getJointInfo(", "least one of the links is self colliding \"\"\" try:", "import * from qibullet.link import Link from qibullet.joint import Joint", "= dict() def loadRobot(self, translation, quaternion, physicsClientId=0): \"\"\" Loads the", "* from qibullet.link import Link from qibullet.joint import Joint IS_VERSION_PYTHON_3", "module is loaded. 
Returns: physics_client - The id of the", "sys import pybullet from qibullet.camera import * from qibullet.link import", "Link from qibullet.joint import Joint IS_VERSION_PYTHON_3 = sys.version_info[0] >= 3", "else: # PYTHON 2 Version joint_info = pybullet.getJointInfo( self.robot_model, i,", "return self.active_camera.getResolution() except KeyError: raise pybullet.error(\"No active camera, resolution unavailable\")", "try: if type(link_names) is str: assert link_names in self.link_dict.keys() names", "= None except KeyError: print(\"This camera does not exist, use", "camera is attached \"\"\" try: assert self.active_camera is not None", "KeyError: raise pybullet.error(\"No active camera, cannot retrieve any link\") def", "quaternion [x, y, z, q] of the robot in the", "Version joint_info = pybullet.getJointInfo( self.robot_model, i, physicsClientId=self.physics_client) self.link_dict[joint_info[12]] = Link(joint_info)", "has to be strictly superior to 0 and inferior or", "radians to be applied percentage_speeds - Percentages of the max", "rad/s. If one of the joint doesn't exist, the method", "- The rotation of the robot's base on the z", "IS_VERSION_PYTHON_3: # PYTHON 3 version needs a conversion bytes to", "the robot's joints. Tests have to be performed by the", "pybullet.setJointMotorControl2( self.robot_model, self.joint_dict[joint_name].getIndex(), pybullet.POSITION_CONTROL, targetPosition=joint_value, maxVelocity=joint_speed, force=self.joint_dict[joint_name].getMaxEffort(), physicsClientId=self.physics_client) def getAnglesPosition(self,", "getPosition(self): \"\"\" Gets the position of the robot's base in", "angles on the robot's joints. 
Tests have to be performed", "except KeyError: raise pybullet.error(\"No active camera, cannot retrieve any link\")", "frame\") def getCameraResolution(self): \"\"\" Returns the resolution of the active", "the robot in the WORLD frame physicsClientId - The id", "camera to be unsubscribed \"\"\" try: # If no active", "active camera of the robot. Returns: active_camera - Camera (CameraRgb", "- List containing 4 elements, the quaternion [x, y, z,", "only one camera can be subscribed. Parameters: camera_id - The", "Joint IS_VERSION_PYTHON_3 = sys.version_info[0] >= 3 class RobotVirtual: \"\"\" Mother", "cannot self collide Returns: self_colliding - Boolean, if True at", "str joint_info = pybullet.getJointInfo( self.robot_model, i, physicsClientId=self.physics_client) self.link_dict[joint_info[12].decode('utf-8')] =\\ Link(joint_info)", "base in the world frame. Returns: x - The position", "the module) is spawned \"\"\" return self.physics_client def setAngles(self, joint_names,", "None return self.active_camera.getResolution() except KeyError: raise pybullet.error(\"No active camera, resolution", "= sys.version_info[0] >= 3 class RobotVirtual: \"\"\" Mother class representing", "= dict() self.link_dict = dict() def loadRobot(self, translation, quaternion, physicsClientId=0):", "class representing a virtual robot \"\"\" def __init__(self, description_file): \"\"\"", "are set to 0 rad. 
Parameters: translation - List containing", "robot in the WORLD frame quaternion - List containing 4", "= Joint(joint_info) def getRobotModel(self): \"\"\" Returns the pybullet model to", "of string containing the names of the joints Returns: joint_positions", "self.active_camera.getCameraLink() except KeyError: raise pybullet.error(\"No active camera, cannot retrieve any", "<filename>qibullet/robot_virtual.py #!/usr/bin/env python # coding: utf-8 import sys import pybullet", "True at least one of the links is self colliding", "\"\"\" try: self.physics_client = physicsClientId self.robot_model = pybullet.loadURDF( self.description_file, translation,", "try: # If no active camera is found, nothing is", "resolution - a CameraResolution object describing the resolution of the", "elements, the translation [x, y, z] of the robot in", "of floats containing the joint's positions \"\"\" joint_positions = list()", "\"\"\" def __init__(self, description_file): \"\"\" Constructor Parameters: description_file - The", "Returns: physics_client - The id of the simulation in which", "the robot's joints in radians. If one of the joint", "of the active camera \"\"\" try: assert self.active_camera is not", "be strictly superior to 0 and inferior or equal to", "rest of the virtual robot. Parameters: link_names - String or", "percentage_speed pybullet.setJointMotorControl2( self.robot_model, self.joint_dict[joint_name].getIndex(), pybullet.POSITION_CONTROL, targetPosition=joint_value, maxVelocity=joint_speed, force=self.joint_dict[joint_name].getMaxEffort(), physicsClientId=self.physics_client) def", "def getPosition(self): \"\"\" Gets the position of the robot's base", "file giving the description of the virtual robot. For now,", "of the camera \"\"\" try: self.active_camera = self.camera_dict[camera_id] self.active_camera.subscribe(resolution=resolution) except", "pybullet error will be raised. Returns: resolution - a Link", "holding the camera id. 
Parameters: camera_id - The id of", "self.active_camera def getPosition(self): \"\"\" Gets the position of the robot's", "position[0], position[1], theta def isSelfColliding(self, link_names): \"\"\" Specifies if a", "should be used, otherwise the link cannot self collide Returns:", "KeyError. Parameters: joint_names - List of string containing the names", "the moment, only one camera can be subscribed. Parameters: camera_id", "None if self.active_camera.getCameraId() == camera_id: self.active_camera.unsubscribe() self.active_camera = None except", "percentage_speeds): \"\"\" Set angles on the robot's joints. Tests have", "containing the names of the links to be checked for", "| pybullet.URDF_USE_MATERIAL_COLORS_FROM_MTL) except pybullet.error as e: raise pybullet.error(\"Cannot load robot", "raised. Returns: resolution - a CameraResolution object describing the resolution", "== pybullet.JOINT_PRISMATIC or\\ joint_info[2] == pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1].decode('utf-8')] =\\ Joint(joint_info) else:", "the robot's base on the y axis in meters theta", "joint's velocities \"\"\" joint_velocities = list() for joint_name in joint_names:", "- CameraResolution object, the resolution of the camera \"\"\" try:", "camera. Be advised that the subscribeCamera method needs to be", "velocities \"\"\" joint_velocities = list() for joint_name in joint_names: joint_velocities.append(pybullet.getJointState(", "robot's base on the x axis, in meters y -", "virtual robot. 
Parameters: link_names - String or list of string", "z, q] of the robot in the WORLD frame physicsClientId", "each joint, has to be strictly superior to 0 and", "joint_velocities - List of floats containing the joint's velocities \"\"\"", "self.active_camera.unsubscribe() self.active_camera = None except KeyError: print(\"This camera does not", "camera, cannot retrieve any link\") def getActiveCamera(self): \"\"\" Returns the", "None is returned \"\"\" return self.active_camera def getPosition(self): \"\"\" Gets", "bodyA=self.robot_model, bodyB=self.robot_model, linkIndexA=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) contact_tuple += pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model, linkIndexB=self.link_dict[name].getIndex(),", "List of string containing the name of the joints to", "containing 3 elements, the translation [x, y, z] of the", "values corresponding to the angles in radians to be applied", "setAngles parameters\") for joint_name, joint_value, percentage_speed in zip( joint_names, joint_values,", "percentage_speeds) except AssertionError: raise pybullet.error(\"Error in the setAngles parameters\") for", "== camera_id: self.active_camera.unsubscribe() self.active_camera = None except KeyError: print(\"This camera", "y, z, q] of the robot in the WORLD frame", "one of the joint doesn't exist, the method will raise", "\"\"\" Returns a camera frame. Be advised that the subscribeCamera", "the camera to be unsubscribed \"\"\" try: # If no", "def getCameraLink(self): \"\"\" Returns the link of the active camera.", "to 0 rad. Parameters: translation - List containing 3 elements,", "sys.version_info[0] >= 3 class RobotVirtual: \"\"\" Mother class representing a", "the link of the active camera. Be advised that the", "on the x axis, in meters y - The positions", "self collision. 
WARNING: only the links with corresponding meshes should", "self.robot_model, i, physicsClientId=self.physics_client) self.link_dict[joint_info[12].decode('utf-8')] =\\ Link(joint_info) if joint_info[2] == pybullet.JOINT_PRISMATIC", "to which the module is associated. Returns: robot_model - The", "pybullet.error(\"Error in the setAngles parameters\") for joint_name, joint_value, percentage_speed in", "or list of string containing the names of the links", "try: assert len(joint_names) ==\\ len(joint_values) ==\\ len(percentage_speeds) assert all( speed", "active camera, resolution unavailable\") def getCameraLink(self): \"\"\" Returns the link", "not None if self.active_camera.getCameraId() == camera_id: self.active_camera.unsubscribe() self.active_camera = None", "Returns: boolean - True if the method ran correctly, False", "of the simulation in which the robot (possessing the module)", "in the world frame. Returns: x - The position of", "base on the y axis in meters theta - The", "is not None if self.active_camera.getCameraId() == camera_id: self.active_camera.unsubscribe() self.active_camera =", "False except AssertionError: raise pybullet.error( \"Unauthorized link checking for self", "the robot. 
If there is no active camera, a None", "self collide Returns: self_colliding - Boolean, if True at least", "a CameraResolution object describing the resolution of the active camera", "return position[0], position[1], theta def isSelfColliding(self, link_names): \"\"\" Specifies if", "joint_info[2] == pybullet.JOINT_PRISMATIC or\\ joint_info[2] == pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1]] = Joint(joint_info)", "self.robot_model, self.joint_dict[joint_name].getIndex(), pybullet.POSITION_CONTROL, targetPosition=joint_value, maxVelocity=joint_speed, force=self.joint_dict[joint_name].getMaxEffort(), physicsClientId=self.physics_client) def getAnglesPosition(self, joint_names):", "found, nothing is unsubscribed assert self.active_camera is not None if", "array, directly exploitable from OpenCV \"\"\" try: assert self.active_camera is", "# If no active camera is found, nothing is unsubscribed", "0 rad. Parameters: translation - List containing 3 elements, the", "elements, the quaternion [x, y, z, q] of the robot", "0 and inferior or equal to 1 \"\"\" try: assert", "frame - The current camera frame as a formatted numpy", "of the robot \"\"\" return self.robot_model def getPhysicsClientId(self): \"\"\" Returns", "the setAngles parameters\") for joint_name, joint_value, percentage_speed in zip( joint_names,", "resolution of the active camera. Be advised that the subscribeCamera", "self.link_dict.keys() names = [link_names] else: assert set(link_names).issubset(self.link_dict.keys()) names = list(link_names)", "frame. Returns: x - The position of the robot's base", "- List of values corresponding to the angles in radians", "return self.physics_client def setAngles(self, joint_names, joint_values, percentage_speeds): \"\"\" Set angles", "RobotVirtual: \"\"\" Mother class representing a virtual robot \"\"\" def", "on the robot's joints. 
Tests have to be performed by", "try: self.physics_client = physicsClientId self.robot_model = pybullet.loadURDF( self.description_file, translation, quaternion,", "= list(link_names) for name in names: contact_tuple = pybullet.getContactPoints( bodyA=self.robot_model,", "colliding with the rest of the virtual robot. Parameters: link_names", "pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model, linkIndexB=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) if len(contact_tuple) != 0: return", "self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[0]) return joint_positions def getAnglesVelocity(self, joint_names): \"\"\" Gets the", "None return self.active_camera.getCameraLink() except KeyError: raise pybullet.error(\"No active camera, cannot", "pybullet error will be raised. Returns: resolution - a CameraResolution", "performed by the child class to guarantee the validity of", "the simulated instance in which the robot is supposed to", "of the robot's joints in rad/s. 
If one of the", "to be used for each joint, has to be strictly", "#!/usr/bin/env python # coding: utf-8 import sys import pybullet from", "__init__(self, description_file): \"\"\" Constructor Parameters: description_file - The file giving", "dict() def loadRobot(self, translation, quaternion, physicsClientId=0): \"\"\" Loads the robot", "translation, quaternion, physicsClientId=0): \"\"\" Loads the robot into a simulation,", "percentage_speeds): joint_speed =\\ self.joint_dict[joint_name].getMaxVelocity() *\\ percentage_speed pybullet.setJointMotorControl2( self.robot_model, self.joint_dict[joint_name].getIndex(), pybullet.POSITION_CONTROL,", "retrieve any frame\") def getCameraResolution(self): \"\"\" Returns the resolution of", "in names: contact_tuple = pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model, linkIndexA=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) contact_tuple", "in the WORLD frame quaternion - List containing 4 elements,", "virtual robot \"\"\" def __init__(self, description_file): \"\"\" Constructor Parameters: description_file", "description of the virtual robot. For now, only URDF is", "of the input parameters. Parameters: joint_names - List of string", "List of floats containing the joint's positions \"\"\" joint_positions =", "in joint_names: joint_positions.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[0]) return joint_positions def getAnglesVelocity(self,", "getCameraResolution(self): \"\"\" Returns the resolution of the active camera. 
Be", "the names of the joints Returns: joint_positions - List of", "there is no active camera, a None is returned \"\"\"", "4 elements, the quaternion [x, y, z, q] of the", "y - The positions of the robot's base on the", "= pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model, linkIndexA=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) contact_tuple += pybullet.getContactPoints( bodyA=self.robot_model,", "pybullet error will be raised. Returns: frame - The current", "return True return False except AssertionError: raise pybullet.error( \"Unauthorized link", "len(joint_names) ==\\ len(joint_values) ==\\ len(percentage_speeds) assert all( speed >= 0.0", "containing the names of the joints Returns: joint_positions - List", "camera does not exist, use a valid camera id\") except", "the module is associated. Returns: robot_model - The pybullet model", "containing the name of the joints to be controlled joint_values", "Returns: frame - The current camera frame as a formatted", "CameraResolution object, the resolution of the camera \"\"\" try: self.active_camera", "String or list of string containing the names of the", "joint_names, joint_values, percentage_speeds): \"\"\" Set angles on the robot's joints.", "exist, use a valid camera id\") def unsubscribeCamera(self, camera_id): \"\"\"", "link is colliding with the rest of the virtual robot.", "which the robot (possessing the module) is spawned \"\"\" return", "subscribeCamera(self, camera_id, resolution=Camera.K_QVGA): \"\"\" Subscribe to the camera holding the", "2 Version joint_info = pybullet.getJointInfo( self.robot_model, i, physicsClientId=self.physics_client) self.link_dict[joint_info[12]] =", "3 version needs a conversion bytes to str joint_info =", "collide Returns: self_colliding - Boolean, if True at least one", "module is associated. Returns: robot_model - The pybullet model of", "the virtual robot. 
For now, only URDF is handled \"\"\"", "frame physicsClientId - The id of the simulated instance in", "joint_positions = list() for joint_name in joint_names: joint_positions.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(),", "Returns: resolution - a Link object describing the link to", "the robot in the WORLD frame quaternion - List containing", "applied percentage_speeds - Percentages of the max speed to be", "checked for self collision. WARNING: only the links with corresponding", "python # coding: utf-8 import sys import pybullet from qibullet.camera", "pybullet.getJointInfo( self.robot_model, i, physicsClientId=self.physics_client) self.link_dict[joint_info[12].decode('utf-8')] =\\ Link(joint_info) if joint_info[2] ==", "percentage_speed in zip( joint_names, joint_values, percentage_speeds): joint_speed =\\ self.joint_dict[joint_name].getMaxVelocity() *\\", "if IS_VERSION_PYTHON_3: # PYTHON 3 version needs a conversion bytes", "joint_names: joint_velocities.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[1]) return joint_velocities def subscribeCamera(self, camera_id,", "describing the link to which the active camera is attached", "the active camera of the robot. If there is no", "will raise a KeyError. Parameters: joint_names - List of string", "the world frame. 
Returns: x - The position of the", "or\\ joint_info[2] == pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1]] = Joint(joint_info) def getRobotModel(self): \"\"\"", "the max speed to be used for each joint, has", "of the joint doesn't exist, the method will raise a", "\"\"\" try: self.active_camera = self.camera_dict[camera_id] self.active_camera.subscribe(resolution=resolution) except KeyError: print(\"This camera", "any link\") def getActiveCamera(self): \"\"\" Returns the active camera of", "pybullet.getEulerFromQuaternion(quaternions)[2] return position[0], position[1], theta def isSelfColliding(self, link_names): \"\"\" Specifies", "qibullet.link import Link from qibullet.joint import Joint IS_VERSION_PYTHON_3 = sys.version_info[0]", "joint_speed =\\ self.joint_dict[joint_name].getMaxVelocity() *\\ percentage_speed pybullet.setJointMotorControl2( self.robot_model, self.joint_dict[joint_name].getIndex(), pybullet.POSITION_CONTROL, targetPosition=joint_value,", "camera id\") def unsubscribeCamera(self, camera_id): \"\"\" Unsubscribe from a camera,", "type(link_names) is str: assert link_names in self.link_dict.keys() names = [link_names]", "Parameters: description_file - The file giving the description of the", "a Link object describing the link to which the active", "if joint_info[2] == pybullet.JOINT_PRISMATIC or\\ joint_info[2] == pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1]] =", "the link cannot self collide Returns: self_colliding - Boolean, if", "return self.active_camera def getPosition(self): \"\"\" Gets the position of the", "return False except AssertionError: raise pybullet.error( \"Unauthorized link checking for", "valid camera id\") except AssertionError: pass def getCameraFrame(self): \"\"\" Returns", "rad. Parameters: translation - List containing 3 elements, the translation", "active camera, cannot retrieve any frame\") def getCameraResolution(self): \"\"\" Returns", "the links descriptions. 
The joints are set to 0 rad.", "3 elements, the translation [x, y, z] of the robot", "at the moment, only one camera can be subscribed. Parameters:", "Returns the id of the simulated instance in which the", "simulated instance in which the module is loaded. Returns: physics_client", "joint_values, percentage_speeds): \"\"\" Set angles on the robot's joints. Tests", "description_file self.physics_client = 0 self.active_camera = None self.camera_dict = dict()", "needs a conversion bytes to str joint_info = pybullet.getJointInfo( self.robot_model,", "joint_values, percentage_speeds): joint_speed =\\ self.joint_dict[joint_name].getMaxVelocity() *\\ percentage_speed pybullet.setJointMotorControl2( self.robot_model, self.joint_dict[joint_name].getIndex(),", "- The current camera frame as a formatted numpy array,", "as a formatted numpy array, directly exploitable from OpenCV \"\"\"", "= pybullet.getBasePositionAndOrientation( self.robot_model, physicsClientId=self.physics_client) theta = pybullet.getEulerFromQuaternion(quaternions)[2] return position[0], position[1],", "[x, y, z, q] of the robot in the WORLD", "If no active camera is found, nothing is unsubscribed assert", "list() for joint_name in joint_names: joint_velocities.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[1]) return", "id\") except AssertionError: pass def getCameraFrame(self): \"\"\" Returns a camera", "name in names: contact_tuple = pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model, linkIndexA=self.link_dict[name].getIndex(), physicsClientId=self.physics_client)", "camera can be subscribed. 
Parameters: camera_id - The id of", "import Joint IS_VERSION_PYTHON_3 = sys.version_info[0] >= 3 class RobotVirtual: \"\"\"", "WORLD frame physicsClientId - The id of the simulated instance", "of floats containing the joint's velocities \"\"\" joint_velocities = list()", "names of the links to be checked for self collision.", "the input parameters. Parameters: joint_names - List of string containing", "of the camera to be unsubscribed \"\"\" try: # If", "is not None return self.active_camera.getResolution() except KeyError: raise pybullet.error(\"No active", "joint_positions def getAnglesVelocity(self, joint_names): \"\"\" Gets the velocity of the", "\"\"\" try: if type(link_names) is str: assert link_names in self.link_dict.keys()", "the robot's base in the world frame. Returns: x -", "to be unsubscribed \"\"\" try: # If no active camera", "self.joint_dict[joint_name].getIndex(), pybullet.POSITION_CONTROL, targetPosition=joint_value, maxVelocity=joint_speed, force=self.joint_dict[joint_name].getMaxEffort(), physicsClientId=self.physics_client) def getAnglesPosition(self, joint_names): \"\"\"", "joint's positions \"\"\" joint_positions = list() for joint_name in joint_names:", "the id of the simulated instance in which the module", "instance in which the robot is supposed to be loaded", "Tests have to be performed by the child class to", "the link to which the active camera is attached \"\"\"", "names = list(link_names) for name in names: contact_tuple = pybullet.getContactPoints(", "Gets the position of the robot's joints in radians. 
If", "the camera \"\"\" try: self.active_camera = self.camera_dict[camera_id] self.active_camera.subscribe(resolution=resolution) except KeyError:", "self.description_file = description_file self.physics_client = 0 self.active_camera = None self.camera_dict", "False otherwise \"\"\" try: self.physics_client = physicsClientId self.robot_model = pybullet.loadURDF(", "KeyError: print(\"This camera does not exist, use a valid camera", "joint_info[2] == pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1]] = Joint(joint_info) def getRobotModel(self): \"\"\" Returns", "resolution - a Link object describing the link to which", "only URDF is handled \"\"\" self.description_file = description_file self.physics_client =", "be called beforehand, otherwise a pybullet error will be raised.", "model: \" + str(e)) for i in range(pybullet.getNumJoints( self.robot_model, physicsClientId=self.physics_client)):", "Joint(joint_info) def getRobotModel(self): \"\"\" Returns the pybullet model to which", "assert set(link_names).issubset(self.link_dict.keys()) names = list(link_names) for name in names: contact_tuple", "= self.camera_dict[camera_id] self.active_camera.subscribe(resolution=resolution) except KeyError: print(\"This camera does not exist,", "pybullet.getJointInfo( self.robot_model, i, physicsClientId=self.physics_client) self.link_dict[joint_info[12]] = Link(joint_info) if joint_info[2] ==", "joint_info = pybullet.getJointInfo( self.robot_model, i, physicsClientId=self.physics_client) self.link_dict[joint_info[12]] = Link(joint_info) if", "be controlled joint_values - List of values corresponding to the", "globalScaling=1.0, physicsClientId=self.physics_client, flags=pybullet.URDF_USE_SELF_COLLISION | pybullet.URDF_USE_MATERIAL_COLORS_FROM_MTL) except pybullet.error as e: raise", "Percentages of the max speed to be used for each", "to 1 \"\"\" try: assert len(joint_names) ==\\ len(joint_values) ==\\ len(percentage_speeds)", "useFixedBase=False, globalScaling=1.0, 
physicsClientId=self.physics_client, flags=pybullet.URDF_USE_SELF_COLLISION | pybullet.URDF_USE_MATERIAL_COLORS_FROM_MTL) except pybullet.error as e:", "\"\"\" try: assert len(joint_names) ==\\ len(joint_values) ==\\ len(percentage_speeds) assert all(", "The id of the simulation in which the robot (possessing", "use a valid camera id\") except AssertionError: pass def getCameraFrame(self):", "def getPhysicsClientId(self): \"\"\" Returns the id of the simulated instance", "the virtual robot. Parameters: link_names - String or list of", "describing the resolution of the active camera \"\"\" try: assert", "the links with corresponding meshes should be used, otherwise the", "= dict() self.joint_dict = dict() self.link_dict = dict() def loadRobot(self,", "or CameraDepth) object, the active camera of the robot. If", "= None self.camera_dict = dict() self.joint_dict = dict() self.link_dict =", "in meters y - The positions of the robot's base", "(possessing the module) is spawned \"\"\" return self.physics_client def setAngles(self,", "assert self.active_camera is not None return self.active_camera.getResolution() except KeyError: raise", "linkIndexA=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) contact_tuple += pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model, linkIndexB=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) if", "the child class to guarantee the validity of the input", "camera_id): \"\"\" Unsubscribe from a camera, the one holding the", "loadRobot(self, translation, quaternion, physicsClientId=0): \"\"\" Loads the robot into a", "self.camera_dict = dict() self.joint_dict = dict() self.link_dict = dict() def", "a valid camera id\") except AssertionError: pass def getCameraFrame(self): \"\"\"", "joint_positions - List of floats containing the joint's positions \"\"\"", "=\\ Link(joint_info) if joint_info[2] == pybullet.JOINT_PRISMATIC or\\ joint_info[2] == pybullet.JOINT_REVOLUTE:", "camera_id 
- The id of the camera to be unsubscribed", "3 class RobotVirtual: \"\"\" Mother class representing a virtual robot", "dict() self.joint_dict = dict() self.link_dict = dict() def loadRobot(self, translation,", "raise pybullet.error(\"Error in the setAngles parameters\") for joint_name, joint_value, percentage_speed", "pybullet.error(\"Cannot load robot model: \" + str(e)) for i in", "controlled joint_values - List of values corresponding to the angles", "def __init__(self, description_file): \"\"\" Constructor Parameters: description_file - The file", "containing the names of the joints Returns: joint_velocities - List", "camera \"\"\" try: self.active_camera = self.camera_dict[camera_id] self.active_camera.subscribe(resolution=resolution) except KeyError: print(\"This", "camera is found, nothing is unsubscribed assert self.active_camera is not", "The id of the camera to be subscribed resolution -", "active camera of the robot. If there is no active", "the names of the links to be checked for self", "self.active_camera = None self.camera_dict = dict() self.joint_dict = dict() self.link_dict", "== pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1]] = Joint(joint_info) def getRobotModel(self): \"\"\" Returns the", "pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1]] = Joint(joint_info) def getRobotModel(self): \"\"\" Returns the pybullet", "frame quaternion - List containing 4 elements, the quaternion [x,", "id of the simulation in which the robot (possessing the", "current camera frame as a formatted numpy array, directly exploitable", "a None is returned \"\"\" return self.active_camera def getPosition(self): \"\"\"", "len(contact_tuple) != 0: return True return False except AssertionError: raise", "0 self.active_camera = None self.camera_dict = dict() self.joint_dict = dict()", "List of floats containing the joint's velocities \"\"\" joint_velocities =", "list() for joint_name in joint_names: joint_positions.append(pybullet.getJointState( self.robot_model, 
self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[0]) return", "will be raised. Returns: resolution - a CameraResolution object describing", "The id of the camera to be unsubscribed \"\"\" try:", "radians. If one of the joint doesn't exist, the method", "Returns the resolution of the active camera. Be advised that", "a link is colliding with the rest of the virtual", "which the robot is supposed to be loaded Returns: boolean", "joint_value, percentage_speed in zip( joint_names, joint_values, percentage_speeds): joint_speed =\\ self.joint_dict[joint_name].getMaxVelocity()", "theta def isSelfColliding(self, link_names): \"\"\" Specifies if a link is", "Parameters: link_names - String or list of string containing the", "only the links with corresponding meshes should be used, otherwise", "self.link_dict[joint_info[12]] = Link(joint_info) if joint_info[2] == pybullet.JOINT_PRISMATIC or\\ joint_info[2] ==", "robot's joints in rad/s. If one of the joint doesn't", "robot's base on the y axis in meters theta -", "camera of the robot. Returns: active_camera - Camera (CameraRgb or", "robot. Parameters: link_names - String or list of string containing", "base on the z axis in meters \"\"\" position, quaternions", "Returns the active camera of the robot. Returns: active_camera -", "from OpenCV \"\"\" try: assert self.active_camera is not None return", "as e: raise pybullet.error(\"Cannot load robot model: \" + str(e))", "0.0 and speed <= 1.0 for speed in percentage_speeds) except", "of the joints Returns: joint_velocities - List of floats containing", "def unsubscribeCamera(self, camera_id): \"\"\" Unsubscribe from a camera, the one", "if self.active_camera.getCameraId() == camera_id: self.active_camera.unsubscribe() self.active_camera = None except KeyError:", "world frame. Returns: x - The position of the robot's", "moment, only one camera can be subscribed. 
Parameters: camera_id -", "subscribeCamera method needs to be called beforehand, otherwise a pybullet", "def isSelfColliding(self, link_names): \"\"\" Specifies if a link is colliding", "AssertionError: raise pybullet.error(\"No active camera, cannot retrieve any frame\") def", "Mother class representing a virtual robot \"\"\" def __init__(self, description_file):", "unsubscribed \"\"\" try: # If no active camera is found,", "on the z axis in meters \"\"\" position, quaternions =", "now, only URDF is handled \"\"\" self.description_file = description_file self.physics_client", "guarantee the validity of the input parameters. Parameters: joint_names -", "error will be raised. Returns: resolution - a CameraResolution object", "of string containing the names of the joints Returns: joint_velocities", "quaternion, physicsClientId=0): \"\"\" Loads the robot into a simulation, loads", "URDF is handled \"\"\" self.description_file = description_file self.physics_client = 0", "self.active_camera is not None if self.active_camera.getCameraId() == camera_id: self.active_camera.unsubscribe() self.active_camera", "the robot's base on the x axis, in meters y", "base on the x axis, in meters y - The", "joint_names): \"\"\" Gets the velocity of the robot's joints in", "pybullet.getBasePositionAndOrientation( self.robot_model, physicsClientId=self.physics_client) theta = pybullet.getEulerFromQuaternion(quaternions)[2] return position[0], position[1], theta", "\"\"\" Specifies if a link is colliding with the rest", "str(e)) for i in range(pybullet.getNumJoints( self.robot_model, physicsClientId=self.physics_client)): if IS_VERSION_PYTHON_3: #", "True return False except AssertionError: raise pybullet.error( \"Unauthorized link checking", "= description_file self.physics_client = 0 self.active_camera = None self.camera_dict =", "id of the simulated instance in which the robot is", "doesn't exist, the method will raise a KeyError. Parameters: joint_names", "a KeyError. 
Parameters: joint_names - List of string containing the", "The current camera frame as a formatted numpy array, directly", "try: assert self.active_camera is not None return self.active_camera.getCameraLink() except KeyError:", "x axis, in meters y - The positions of the", "None self.camera_dict = dict() self.joint_dict = dict() self.link_dict = dict()", "method ran correctly, False otherwise \"\"\" try: self.physics_client = physicsClientId", "be applied percentage_speeds - Percentages of the max speed to", "positions \"\"\" joint_positions = list() for joint_name in joint_names: joint_positions.append(pybullet.getJointState(", "self.joint_dict[joint_info[1]] = Joint(joint_info) def getRobotModel(self): \"\"\" Returns the pybullet model", "the position of the robot's base in the world frame.", "The file giving the description of the virtual robot. For", "the translation [x, y, z] of the robot in the", "load robot model: \" + str(e)) for i in range(pybullet.getNumJoints(", "joint_values - List of values corresponding to the angles in", "pybullet.error(\"No active camera, cannot retrieve any link\") def getActiveCamera(self): \"\"\"", "into a simulation, loads the joints and the links descriptions.", "pybullet.error as e: raise pybullet.error(\"Cannot load robot model: \" +", "of the joints Returns: joint_positions - List of floats containing", "= Link(joint_info) if joint_info[2] == pybullet.JOINT_PRISMATIC or\\ joint_info[2] == pybullet.JOINT_REVOLUTE:", "of the virtual robot. 
Parameters: link_names - String or list", "contact_tuple = pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model, linkIndexA=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) contact_tuple += pybullet.getContactPoints(", "to be performed by the child class to guarantee the", "joint_velocities.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[1]) return joint_velocities def subscribeCamera(self, camera_id, resolution=Camera.K_QVGA):", "of the robot's base on the z axis in meters", "self.physics_client = physicsClientId self.robot_model = pybullet.loadURDF( self.description_file, translation, quaternion, useFixedBase=False,", "the robot (possessing the module) is spawned \"\"\" return self.physics_client", "1 \"\"\" try: assert len(joint_names) ==\\ len(joint_values) ==\\ len(percentage_speeds) assert", "camera, the one holding the camera id. Parameters: camera_id -", "links with corresponding meshes should be used, otherwise the link", "pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model, linkIndexA=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) contact_tuple += pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model,", "or equal to 1 \"\"\" try: assert len(joint_names) ==\\ len(joint_values)", "the method will raise a KeyError. Parameters: joint_names - List", "the resolution of the active camera. Be advised that the", "containing 4 elements, the quaternion [x, y, z, q] of", "y axis in meters theta - The rotation of the", "CameraResolution object describing the resolution of the active camera \"\"\"", ">= 0.0 and speed <= 1.0 for speed in percentage_speeds)", "loaded. 
Returns: physics_client - The id of the simulation in", "active camera, a None is returned \"\"\" return self.active_camera def", "object describing the link to which the active camera is", "self.active_camera = None except KeyError: print(\"This camera does not exist,", "self.active_camera.getResolution() except KeyError: raise pybullet.error(\"No active camera, resolution unavailable\") def", "- The pybullet model of the robot \"\"\" return self.robot_model", "exist, use a valid camera id\") except AssertionError: pass def", "collision. WARNING: only the links with corresponding meshes should be", "a simulation, loads the joints and the links descriptions. The", "physicsClientId=self.physics_client)[1]) return joint_velocities def subscribeCamera(self, camera_id, resolution=Camera.K_QVGA): \"\"\" Subscribe to", "robot is supposed to be loaded Returns: boolean - True", "cannot retrieve any frame\") def getCameraResolution(self): \"\"\" Returns the resolution", "raise pybullet.error(\"No active camera, cannot retrieve any link\") def getActiveCamera(self):", "If one of the joint doesn't exist, the method will", "List containing 3 elements, the translation [x, y, z] of", "of string containing the names of the links to be", "List of string containing the names of the joints Returns:", "is loaded. 
Returns: physics_client - The id of the simulation", "assert len(joint_names) ==\\ len(joint_values) ==\\ len(percentage_speeds) assert all( speed >=", "Loads the robot into a simulation, loads the joints and", "to be called beforehand, otherwise a pybullet error will be", "link to which the active camera is attached \"\"\" try:", "Returns: joint_positions - List of floats containing the joint's positions", "corresponding meshes should be used, otherwise the link cannot self", "def getActiveCamera(self): \"\"\" Returns the active camera of the robot.", ">= 3 class RobotVirtual: \"\"\" Mother class representing a virtual", "child class to guarantee the validity of the input parameters.", "- True if the method ran correctly, False otherwise \"\"\"", "to be strictly superior to 0 and inferior or equal", "formatted numpy array, directly exploitable from OpenCV \"\"\" try: assert", "camera \"\"\" try: assert self.active_camera is not None return self.active_camera.getResolution()", "the camera id. 
WARNING: at the moment, only one camera", "== pybullet.JOINT_PRISMATIC or\\ joint_info[2] == pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1]] = Joint(joint_info) def", "= list() for joint_name in joint_names: joint_positions.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[0])", "meters \"\"\" position, quaternions = pybullet.getBasePositionAndOrientation( self.robot_model, physicsClientId=self.physics_client) theta =", "is attached \"\"\" try: assert self.active_camera is not None return", "coding: utf-8 import sys import pybullet from qibullet.camera import *", "pybullet.JOINT_PRISMATIC or\\ joint_info[2] == pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1]] = Joint(joint_info) def getRobotModel(self):", "speed <= 1.0 for speed in percentage_speeds) except AssertionError: raise", "WARNING: at the moment, only one camera can be subscribed.", "axis in meters theta - The rotation of the robot's", "utf-8 import sys import pybullet from qibullet.camera import * from", "method needs to be called beforehand, otherwise a pybullet error", "error will be raised. Returns: resolution - a Link object", "robot's joints in radians. If one of the joint doesn't", "floats containing the joint's velocities \"\"\" joint_velocities = list() for", "id of the camera to be subscribed resolution - CameraResolution", "the velocity of the robot's joints in rad/s. If one", "otherwise a pybullet error will be raised. Returns: frame -", "\"\"\" try: assert self.active_camera is not None return self.active_camera.getResolution() except", "be checked for self collision. WARNING: only the links with", "PYTHON 2 Version joint_info = pybullet.getJointInfo( self.robot_model, i, physicsClientId=self.physics_client) self.link_dict[joint_info[12]]", "be raised. 
Returns: resolution - a CameraResolution object describing the", "the resolution of the active camera \"\"\" try: assert self.active_camera", "be loaded Returns: boolean - True if the method ran", "of the max speed to be used for each joint,", "def subscribeCamera(self, camera_id, resolution=Camera.K_QVGA): \"\"\" Subscribe to the camera holding", "=\\ self.joint_dict[joint_name].getMaxVelocity() *\\ percentage_speed pybullet.setJointMotorControl2( self.robot_model, self.joint_dict[joint_name].getIndex(), pybullet.POSITION_CONTROL, targetPosition=joint_value, maxVelocity=joint_speed,", "names: contact_tuple = pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model, linkIndexA=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) contact_tuple +=", "in zip( joint_names, joint_values, percentage_speeds): joint_speed =\\ self.joint_dict[joint_name].getMaxVelocity() *\\ percentage_speed", "getPhysicsClientId(self): \"\"\" Returns the id of the simulated instance in", "one holding the camera id. Parameters: camera_id - The id", "assert self.active_camera is not None if self.active_camera.getCameraId() == camera_id: self.active_camera.unsubscribe()", "range(pybullet.getNumJoints( self.robot_model, physicsClientId=self.physics_client)): if IS_VERSION_PYTHON_3: # PYTHON 3 version needs", "input parameters. Parameters: joint_names - List of string containing the", "to the angles in radians to be applied percentage_speeds -", "for joint_name in joint_names: joint_velocities.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[1]) return joint_velocities", "superior to 0 and inferior or equal to 1 \"\"\"", "which the module is loaded. 
Returns: physics_client - The id", "if joint_info[2] == pybullet.JOINT_PRISMATIC or\\ joint_info[2] == pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1].decode('utf-8')] =\\", "and speed <= 1.0 for speed in percentage_speeds) except AssertionError:", "AssertionError: raise pybullet.error(\"Error in the setAngles parameters\") for joint_name, joint_value,", "= [link_names] else: assert set(link_names).issubset(self.link_dict.keys()) names = list(link_names) for name", "\"\"\" Returns the link of the active camera. Be advised", "link of the active camera. Be advised that the subscribeCamera", "physicsClientId=self.physics_client)[0]) return joint_positions def getAnglesVelocity(self, joint_names): \"\"\" Gets the velocity", "self.robot_model, physicsClientId=self.physics_client) theta = pybullet.getEulerFromQuaternion(quaternions)[2] return position[0], position[1], theta def", "Constructor Parameters: description_file - The file giving the description of", "\"\"\" Loads the robot into a simulation, loads the joints", "theta = pybullet.getEulerFromQuaternion(quaternions)[2] return position[0], position[1], theta def isSelfColliding(self, link_names):", "colliding \"\"\" try: if type(link_names) is str: assert link_names in", "translation, quaternion, useFixedBase=False, globalScaling=1.0, physicsClientId=self.physics_client, flags=pybullet.URDF_USE_SELF_COLLISION | pybullet.URDF_USE_MATERIAL_COLORS_FROM_MTL) except pybullet.error", "\"\"\" Set angles on the robot's joints. Tests have to", "for name in names: contact_tuple = pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model, linkIndexA=self.link_dict[name].getIndex(),", "the links to be checked for self collision. 
WARNING: only", "in the WORLD frame physicsClientId - The id of the", "object, the resolution of the camera \"\"\" try: self.active_camera =", "\"\"\" Gets the position of the robot's base in the", "meshes should be used, otherwise the link cannot self collide", "of string containing the name of the joints to be", "exploitable from OpenCV \"\"\" try: assert self.active_camera is not None", "self.link_dict = dict() def loadRobot(self, translation, quaternion, physicsClientId=0): \"\"\" Loads", "raise pybullet.error(\"No active camera, resolution unavailable\") def getCameraLink(self): \"\"\" Returns", "not None return self.active_camera.getCameraLink() except KeyError: raise pybullet.error(\"No active camera,", "self.active_camera = self.camera_dict[camera_id] self.active_camera.subscribe(resolution=resolution) except KeyError: print(\"This camera does not", "equal to 1 \"\"\" try: assert len(joint_names) ==\\ len(joint_values) ==\\", "raise pybullet.error(\"No active camera, cannot retrieve any frame\") def getCameraResolution(self):", "joint_names: joint_positions.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[0]) return joint_positions def getAnglesVelocity(self, joint_names):", "link_names - String or list of string containing the names", "setAngles(self, joint_names, joint_values, percentage_speeds): \"\"\" Set angles on the robot's", "active_camera - Camera (CameraRgb or CameraDepth) object, the active camera", "raised. 
Returns: frame - The current camera frame as a", "camera_id: self.active_camera.unsubscribe() self.active_camera = None except KeyError: print(\"This camera does", "names = [link_names] else: assert set(link_names).issubset(self.link_dict.keys()) names = list(link_names) for", "self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[0]) return joint_positions def getAnglesVelocity(self, joint_names): \"\"\" Gets", "the robot into a simulation, loads the joints and the", "pybullet.error(\"No active camera, resolution unavailable\") def getCameraLink(self): \"\"\" Returns the", "joint_names - List of string containing the name of the", "getActiveCamera(self): \"\"\" Returns the active camera of the robot. Returns:", "contact_tuple += pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model, linkIndexB=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) if len(contact_tuple) !=", "error will be raised. Returns: frame - The current camera", "Gets the position of the robot's base in the world", "in which the robot is supposed to be loaded Returns:", "self colliding \"\"\" try: if type(link_names) is str: assert link_names", "self.active_camera.getFrame() except AssertionError: raise pybullet.error(\"No active camera, cannot retrieve any", "self.physics_client def setAngles(self, joint_names, joint_values, percentage_speeds): \"\"\" Set angles on", "of the simulated instance in which the robot is supposed", "physicsClientId=self.physics_client) self.link_dict[joint_info[12].decode('utf-8')] =\\ Link(joint_info) if joint_info[2] == pybullet.JOINT_PRISMATIC or\\ joint_info[2]", "pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1].decode('utf-8')] =\\ Joint(joint_info) else: # PYTHON 2 Version joint_info", "pybullet.error(\"No active camera, cannot retrieve any frame\") def getCameraResolution(self): \"\"\"", "for joint_name, joint_value, percentage_speed in zip( joint_names, joint_values, 
percentage_speeds): joint_speed", "List of values corresponding to the angles in radians to", "None except KeyError: print(\"This camera does not exist, use a", "Returns the pybullet model to which the module is associated.", "Returns the link of the active camera. Be advised that", "active camera is attached \"\"\" try: assert self.active_camera is not", "return self.active_camera.getFrame() except AssertionError: raise pybullet.error(\"No active camera, cannot retrieve", "of the virtual robot. For now, only URDF is handled", "camera id\") except AssertionError: pass def getCameraFrame(self): \"\"\" Returns a", "joint_positions.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[0]) return joint_positions def getAnglesVelocity(self, joint_names): \"\"\"", "class to guarantee the validity of the input parameters. Parameters:", "to which the active camera is attached \"\"\" try: assert", "is no active camera, a None is returned \"\"\" return", "def setAngles(self, joint_names, joint_values, percentage_speeds): \"\"\" Set angles on the", "which the active camera is attached \"\"\" try: assert self.active_camera", "physicsClientId=self.physics_client, flags=pybullet.URDF_USE_SELF_COLLISION | pybullet.URDF_USE_MATERIAL_COLORS_FROM_MTL) except pybullet.error as e: raise pybullet.error(\"Cannot", "camera holding the camera id. WARNING: at the moment, only", "for i in range(pybullet.getNumJoints( self.robot_model, physicsClientId=self.physics_client)): if IS_VERSION_PYTHON_3: # PYTHON", "the active camera of the robot. 
Returns: active_camera - Camera", "robot into a simulation, loads the joints and the links", "for joint_name in joint_names: joint_positions.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[0]) return joint_positions", "the robot \"\"\" return self.robot_model def getPhysicsClientId(self): \"\"\" Returns the", "is unsubscribed assert self.active_camera is not None if self.active_camera.getCameraId() ==", "of the robot's base in the world frame. Returns: x", "getCameraFrame(self): \"\"\" Returns a camera frame. Be advised that the", "on the y axis in meters theta - The rotation", "+= pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model, linkIndexB=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) if len(contact_tuple) != 0:", "of the robot's base on the x axis, in meters", "is supposed to be loaded Returns: boolean - True if", "the module is loaded. Returns: physics_client - The id of", "joints are set to 0 rad. Parameters: translation - List", "to be loaded Returns: boolean - True if the method", "# PYTHON 3 version needs a conversion bytes to str", "Set angles on the robot's joints. Tests have to be", "speed in percentage_speeds) except AssertionError: raise pybullet.error(\"Error in the setAngles", "targetPosition=joint_value, maxVelocity=joint_speed, force=self.joint_dict[joint_name].getMaxEffort(), physicsClientId=self.physics_client) def getAnglesPosition(self, joint_names): \"\"\" Gets the", "one camera can be subscribed. Parameters: camera_id - The id", "The positions of the robot's base on the y axis", "of the links to be checked for self collision. WARNING:", "- Boolean, if True at least one of the links", "z] of the robot in the WORLD frame quaternion -", "link\") def getActiveCamera(self): \"\"\" Returns the active camera of the", "in which the module is loaded. Returns: physics_client - The", "for self collision. 
WARNING: only the links with corresponding meshes", "# coding: utf-8 import sys import pybullet from qibullet.camera import", "which the module is associated. Returns: robot_model - The pybullet", "pybullet model to which the module is associated. Returns: robot_model", "assert self.active_camera is not None return self.active_camera.getFrame() except AssertionError: raise", "active camera \"\"\" try: assert self.active_camera is not None return", "camera of the robot. If there is no active camera,", "simulation in which the robot (possessing the module) is spawned", "is associated. Returns: robot_model - The pybullet model of the", "by the child class to guarantee the validity of the", "= 0 self.active_camera = None self.camera_dict = dict() self.joint_dict =", "Parameters: joint_names - List of string containing the names of", "linkIndexB=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) if len(contact_tuple) != 0: return True return False", "the links is self colliding \"\"\" try: if type(link_names) is", "directly exploitable from OpenCV \"\"\" try: assert self.active_camera is not", "joints and the links descriptions. The joints are set to", "set to 0 rad. Parameters: translation - List containing 3", "self.active_camera.getCameraId() == camera_id: self.active_camera.unsubscribe() self.active_camera = None except KeyError: print(\"This", "at least one of the links is self colliding \"\"\"", "!= 0: return True return False except AssertionError: raise pybullet.error(", "velocity of the robot's joints in rad/s. 
If one of", "attached \"\"\" try: assert self.active_camera is not None return self.active_camera.getCameraLink()", "= pybullet.getJointInfo( self.robot_model, i, physicsClientId=self.physics_client) self.link_dict[joint_info[12].decode('utf-8')] =\\ Link(joint_info) if joint_info[2]", "the joint's positions \"\"\" joint_positions = list() for joint_name in", "joints Returns: joint_velocities - List of floats containing the joint's", "physics_client - The id of the simulation in which the", "assert self.active_camera is not None return self.active_camera.getCameraLink() except KeyError: raise", "the joints Returns: joint_positions - List of floats containing the", "is not None return self.active_camera.getCameraLink() except KeyError: raise pybullet.error(\"No active", "joints in radians. If one of the joint doesn't exist,", "links is self colliding \"\"\" try: if type(link_names) is str:", "\"\"\" Gets the velocity of the robot's joints in rad/s.", "a pybullet error will be raised. Returns: resolution - a", "links to be checked for self collision. WARNING: only the", "return self.robot_model def getPhysicsClientId(self): \"\"\" Returns the id of the", "the quaternion [x, y, z, q] of the robot in", "in which the robot (possessing the module) is spawned \"\"\"", "\"\"\" Returns the active camera of the robot. Returns: active_camera", "\"\"\" position, quaternions = pybullet.getBasePositionAndOrientation( self.robot_model, physicsClientId=self.physics_client) theta = pybullet.getEulerFromQuaternion(quaternions)[2]", "and inferior or equal to 1 \"\"\" try: assert len(joint_names)", "is spawned \"\"\" return self.physics_client def setAngles(self, joint_names, joint_values, percentage_speeds):", "be raised. Returns: frame - The current camera frame as", "self.active_camera is not None return self.active_camera.getCameraLink() except KeyError: raise pybullet.error(\"No", "robot. 
For now, only URDF is handled \"\"\" self.description_file =", "not None return self.active_camera.getResolution() except KeyError: raise pybullet.error(\"No active camera,", "module) is spawned \"\"\" return self.physics_client def setAngles(self, joint_names, joint_values,", "self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[1]) return joint_velocities def subscribeCamera(self, camera_id, resolution=Camera.K_QVGA): \"\"\"", "except KeyError: raise pybullet.error(\"No active camera, resolution unavailable\") def getCameraLink(self):", "\"\"\" return self.active_camera def getPosition(self): \"\"\" Gets the position of", "translation [x, y, z] of the robot in the WORLD", "in percentage_speeds) except AssertionError: raise pybullet.error(\"Error in the setAngles parameters\")", "object describing the resolution of the active camera \"\"\" try:", "position, quaternions = pybullet.getBasePositionAndOrientation( self.robot_model, physicsClientId=self.physics_client) theta = pybullet.getEulerFromQuaternion(quaternions)[2] return", "needs to be called beforehand, otherwise a pybullet error will", "the position of the robot's joints in radians. If one", "pybullet.JOINT_PRISMATIC or\\ joint_info[2] == pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1].decode('utf-8')] =\\ Joint(joint_info) else: #", "def getCameraFrame(self): \"\"\" Returns a camera frame. Be advised that", "The rotation of the robot's base on the z axis", "robot's joints. Tests have to be performed by the child", "raised. Returns: resolution - a Link object describing the link", "joints in rad/s. 
If one of the joint doesn't exist,", "try: self.active_camera = self.camera_dict[camera_id] self.active_camera.subscribe(resolution=resolution) except KeyError: print(\"This camera does", "subscribed resolution - CameraResolution object, the resolution of the camera", "- Camera (CameraRgb or CameraDepth) object, the active camera of", "set(link_names).issubset(self.link_dict.keys()) names = list(link_names) for name in names: contact_tuple =", "the z axis in meters \"\"\" position, quaternions = pybullet.getBasePositionAndOrientation(", "is self colliding \"\"\" try: if type(link_names) is str: assert", "joint_name, joint_value, percentage_speed in zip( joint_names, joint_values, percentage_speeds): joint_speed =\\", "active camera, cannot retrieve any link\") def getActiveCamera(self): \"\"\" Returns", "\"\"\" joint_positions = list() for joint_name in joint_names: joint_positions.append(pybullet.getJointState( self.robot_model,", "\"\"\" Returns the resolution of the active camera. Be advised", "Joint(joint_info) else: # PYTHON 2 Version joint_info = pybullet.getJointInfo( self.robot_model,", "loads the joints and the links descriptions. The joints are", "Returns: resolution - a CameraResolution object describing the resolution of", "of the active camera. Be advised that the subscribeCamera method", "the WORLD frame quaternion - List containing 4 elements, the", "the name of the joints to be controlled joint_values -", "assert link_names in self.link_dict.keys() names = [link_names] else: assert set(link_names).issubset(self.link_dict.keys())", "descriptions. The joints are set to 0 rad. 
Parameters: translation", "robot in the WORLD frame physicsClientId - The id of", "physicsClientId=self.physics_client)): if IS_VERSION_PYTHON_3: # PYTHON 3 version needs a conversion", "max speed to be used for each joint, has to", "a virtual robot \"\"\" def __init__(self, description_file): \"\"\" Constructor Parameters:", "Camera (CameraRgb or CameraDepth) object, the active camera of the", "the robot's joints in rad/s. If one of the joint", "link_names): \"\"\" Specifies if a link is colliding with the", "does not exist, use a valid camera id\") def unsubscribeCamera(self,", "quaternions = pybullet.getBasePositionAndOrientation( self.robot_model, physicsClientId=self.physics_client) theta = pybullet.getEulerFromQuaternion(quaternions)[2] return position[0],", "camera, a None is returned \"\"\" return self.active_camera def getPosition(self):", "class RobotVirtual: \"\"\" Mother class representing a virtual robot \"\"\"", "from a camera, the one holding the camera id. Parameters:", "giving the description of the virtual robot. For now, only", "- List of string containing the name of the joints", "containing the joint's positions \"\"\" joint_positions = list() for joint_name", "to str joint_info = pybullet.getJointInfo( self.robot_model, i, physicsClientId=self.physics_client) self.link_dict[joint_info[12].decode('utf-8')] =\\", "getAnglesVelocity(self, joint_names): \"\"\" Gets the velocity of the robot's joints", "camera, cannot retrieve any frame\") def getCameraResolution(self): \"\"\" Returns the", "subscribed. 
Parameters: camera_id - The id of the camera to", "self.active_camera is not None return self.active_camera.getFrame() except AssertionError: raise pybullet.error(\"No", "use a valid camera id\") def unsubscribeCamera(self, camera_id): \"\"\" Unsubscribe", "joint_name in joint_names: joint_positions.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[0]) return joint_positions def", "robot's base on the z axis in meters \"\"\" position,", "joints Returns: joint_positions - List of floats containing the joint's", "instance in which the module is loaded. Returns: physics_client -", "string containing the names of the joints Returns: joint_positions -", "qibullet.camera import * from qibullet.link import Link from qibullet.joint import", "pybullet from qibullet.camera import * from qibullet.link import Link from", "otherwise the link cannot self collide Returns: self_colliding - Boolean,", "pybullet.POSITION_CONTROL, targetPosition=joint_value, maxVelocity=joint_speed, force=self.joint_dict[joint_name].getMaxEffort(), physicsClientId=self.physics_client) def getAnglesPosition(self, joint_names): \"\"\" Gets", "\"\"\" Mother class representing a virtual robot \"\"\" def __init__(self,", "robot. 
Returns: active_camera - Camera (CameraRgb or CameraDepth) object, the", "id\") def unsubscribeCamera(self, camera_id): \"\"\" Unsubscribe from a camera, the", "be performed by the child class to guarantee the validity", "q] of the robot in the WORLD frame physicsClientId -", "i, physicsClientId=self.physics_client) self.link_dict[joint_info[12].decode('utf-8')] =\\ Link(joint_info) if joint_info[2] == pybullet.JOINT_PRISMATIC or\\", "== pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1].decode('utf-8')] =\\ Joint(joint_info) else: # PYTHON 2 Version", "physicsClientId=self.physics_client) def getAnglesPosition(self, joint_names): \"\"\" Gets the position of the", "Link(joint_info) if joint_info[2] == pybullet.JOINT_PRISMATIC or\\ joint_info[2] == pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1].decode('utf-8')]", "physicsClientId=self.physics_client) theta = pybullet.getEulerFromQuaternion(quaternions)[2] return position[0], position[1], theta def isSelfColliding(self,", "*\\ percentage_speed pybullet.setJointMotorControl2( self.robot_model, self.joint_dict[joint_name].getIndex(), pybullet.POSITION_CONTROL, targetPosition=joint_value, maxVelocity=joint_speed, force=self.joint_dict[joint_name].getMaxEffort(), physicsClientId=self.physics_client)", "a valid camera id\") def unsubscribeCamera(self, camera_id): \"\"\" Unsubscribe from", "a conversion bytes to str joint_info = pybullet.getJointInfo( self.robot_model, i,", "the subscribeCamera method needs to be called beforehand, otherwise a", "string containing the name of the joints to be controlled", "otherwise a pybullet error will be raised. 
Returns: resolution -", "WORLD frame quaternion - List containing 4 elements, the quaternion", "force=self.joint_dict[joint_name].getMaxEffort(), physicsClientId=self.physics_client) def getAnglesPosition(self, joint_names): \"\"\" Gets the position of", "bytes to str joint_info = pybullet.getJointInfo( self.robot_model, i, physicsClientId=self.physics_client) self.link_dict[joint_info[12].decode('utf-8')]", "physicsClientId=0): \"\"\" Loads the robot into a simulation, loads the", "The joints are set to 0 rad. Parameters: translation -", "corresponding to the angles in radians to be applied percentage_speeds", "= pybullet.getJointInfo( self.robot_model, i, physicsClientId=self.physics_client) self.link_dict[joint_info[12]] = Link(joint_info) if joint_info[2]", "speed >= 0.0 and speed <= 1.0 for speed in", "joint_velocities def subscribeCamera(self, camera_id, resolution=Camera.K_QVGA): \"\"\" Subscribe to the camera", "will be raised. Returns: frame - The current camera frame", "description_file): \"\"\" Constructor Parameters: description_file - The file giving the", "active camera. Be advised that the subscribeCamera method needs to", "from qibullet.link import Link from qibullet.joint import Joint IS_VERSION_PYTHON_3 =", "of the joints to be controlled joint_values - List of", "self.active_camera is not None return self.active_camera.getResolution() except KeyError: raise pybullet.error(\"No", "meters theta - The rotation of the robot's base on", "is colliding with the rest of the virtual robot. Parameters:", "the simulation in which the robot (possessing the module) is", "parameters. 
Parameters: joint_names - List of string containing the name", "\"\"\" try: assert self.active_camera is not None return self.active_camera.getCameraLink() except", "the WORLD frame physicsClientId - The id of the simulated", "joint_velocities = list() for joint_name in joint_names: joint_velocities.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(),", "WARNING: only the links with corresponding meshes should be used,", "Specifies if a link is colliding with the rest of", "in rad/s. If one of the joint doesn't exist, the", "link_names in self.link_dict.keys() names = [link_names] else: assert set(link_names).issubset(self.link_dict.keys()) names", "not exist, use a valid camera id\") def unsubscribeCamera(self, camera_id):", "the joints to be controlled joint_values - List of values", "the active camera \"\"\" try: assert self.active_camera is not None", "the robot is supposed to be loaded Returns: boolean -", "the angles in radians to be applied percentage_speeds - Percentages", "Unsubscribe from a camera, the one holding the camera id.", "the method ran correctly, False otherwise \"\"\" try: self.physics_client =", "joints to be controlled joint_values - List of values corresponding", "a formatted numpy array, directly exploitable from OpenCV \"\"\" try:", "- List of floats containing the joint's velocities \"\"\" joint_velocities", "of the robot in the WORLD frame physicsClientId - The", "the robot. Returns: active_camera - Camera (CameraRgb or CameraDepth) object,", "- a Link object describing the link to which the", "- The id of the simulated instance in which the", "from qibullet.joint import Joint IS_VERSION_PYTHON_3 = sys.version_info[0] >= 3 class", "\"\"\" Gets the position of the robot's joints in radians.", "to guarantee the validity of the input parameters. 
Parameters: joint_names", "position of the robot's base on the x axis, in", "in range(pybullet.getNumJoints( self.robot_model, physicsClientId=self.physics_client)): if IS_VERSION_PYTHON_3: # PYTHON 3 version", "except AssertionError: raise pybullet.error(\"Error in the setAngles parameters\") for joint_name,", "- The id of the camera to be unsubscribed \"\"\"", "the active camera. Be advised that the subscribeCamera method needs", "joint_name in joint_names: joint_velocities.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[1]) return joint_velocities def", "KeyError: raise pybullet.error(\"No active camera, resolution unavailable\") def getCameraLink(self): \"\"\"", "of the robot. If there is no active camera, a", "retrieve any link\") def getActiveCamera(self): \"\"\" Returns the active camera", "camera does not exist, use a valid camera id\") def", "to be checked for self collision. WARNING: only the links", "angles in radians to be applied percentage_speeds - Percentages of", "to 0 and inferior or equal to 1 \"\"\" try:", "resolution of the active camera \"\"\" try: assert self.active_camera is", "id of the simulated instance in which the module is", "def getRobotModel(self): \"\"\" Returns the pybullet model to which the", "dict() self.link_dict = dict() def loadRobot(self, translation, quaternion, physicsClientId=0): \"\"\"", "Returns: self_colliding - Boolean, if True at least one of", "and the links descriptions. 
The joints are set to 0", "the joint doesn't exist, the method will raise a KeyError.", "the y axis in meters theta - The rotation of", "robot \"\"\" def __init__(self, description_file): \"\"\" Constructor Parameters: description_file -", "bodyB=self.robot_model, linkIndexA=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) contact_tuple += pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model, linkIndexB=self.link_dict[name].getIndex(), physicsClientId=self.physics_client)", "+ str(e)) for i in range(pybullet.getNumJoints( self.robot_model, physicsClientId=self.physics_client)): if IS_VERSION_PYTHON_3:", "link cannot self collide Returns: self_colliding - Boolean, if True", "except AssertionError: pass def getCameraFrame(self): \"\"\" Returns a camera frame.", "be unsubscribed \"\"\" try: # If no active camera is", "maxVelocity=joint_speed, force=self.joint_dict[joint_name].getMaxEffort(), physicsClientId=self.physics_client) def getAnglesPosition(self, joint_names): \"\"\" Gets the position", "CameraDepth) object, the active camera of the robot. If there", "correctly, False otherwise \"\"\" try: self.physics_client = physicsClientId self.robot_model =", "return joint_velocities def subscribeCamera(self, camera_id, resolution=Camera.K_QVGA): \"\"\" Subscribe to the", "conversion bytes to str joint_info = pybullet.getJointInfo( self.robot_model, i, physicsClientId=self.physics_client)", "the x axis, in meters y - The positions of", "Boolean, if True at least one of the links is", "advised that the subscribeCamera method needs to be called beforehand,", "the pybullet model to which the module is associated. 
Returns:", "strictly superior to 0 and inferior or equal to 1", "is handled \"\"\" self.description_file = description_file self.physics_client = 0 self.active_camera", "meters y - The positions of the robot's base on", "<= 1.0 for speed in percentage_speeds) except AssertionError: raise pybullet.error(\"Error", "a pybullet error will be raised. Returns: frame - The", "= list() for joint_name in joint_names: joint_velocities.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[1])", "be used for each joint, has to be strictly superior", "associated. Returns: robot_model - The pybullet model of the robot", "parameters\") for joint_name, joint_value, percentage_speed in zip( joint_names, joint_values, percentage_speeds):", "joints. Tests have to be performed by the child class", "joint_info[2] == pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1].decode('utf-8')] =\\ Joint(joint_info) else: # PYTHON 2", "getAnglesPosition(self, joint_names): \"\"\" Gets the position of the robot's joints", "assert all( speed >= 0.0 and speed <= 1.0 for", "len(percentage_speeds) assert all( speed >= 0.0 and speed <= 1.0", "have to be performed by the child class to guarantee", "to be subscribed resolution - CameraResolution object, the resolution of", "name of the joints to be controlled joint_values - List", "joint_names - List of string containing the names of the", "zip( joint_names, joint_values, percentage_speeds): joint_speed =\\ self.joint_dict[joint_name].getMaxVelocity() *\\ percentage_speed pybullet.setJointMotorControl2(", "handled \"\"\" self.description_file = description_file self.physics_client = 0 self.active_camera =", "- The id of the camera to be subscribed resolution", "self.active_camera.subscribe(resolution=resolution) except KeyError: print(\"This camera does not exist, use a", "\"\"\" Subscribe to the camera holding the camera id. 
WARNING:", "position of the robot's base in the world frame. Returns:", "pybullet.loadURDF( self.description_file, translation, quaternion, useFixedBase=False, globalScaling=1.0, physicsClientId=self.physics_client, flags=pybullet.URDF_USE_SELF_COLLISION | pybullet.URDF_USE_MATERIAL_COLORS_FROM_MTL)", "try: assert self.active_camera is not None return self.active_camera.getResolution() except KeyError:", "with the rest of the virtual robot. Parameters: link_names -", "robot. If there is no active camera, a None is", "frame as a formatted numpy array, directly exploitable from OpenCV", "not exist, use a valid camera id\") except AssertionError: pass", "if the method ran correctly, False otherwise \"\"\" try: self.physics_client", "model of the robot \"\"\" return self.robot_model def getPhysicsClientId(self): \"\"\"", "PYTHON 3 version needs a conversion bytes to str joint_info", "if type(link_names) is str: assert link_names in self.link_dict.keys() names =", "robot \"\"\" return self.robot_model def getPhysicsClientId(self): \"\"\" Returns the id", "be raised. Returns: resolution - a Link object describing the", "translation - List containing 3 elements, the translation [x, y,", "physicsClientId=self.physics_client) if len(contact_tuple) != 0: return True return False except", "qibullet.joint import Joint IS_VERSION_PYTHON_3 = sys.version_info[0] >= 3 class RobotVirtual:", "simulated instance in which the robot is supposed to be", "all( speed >= 0.0 and speed <= 1.0 for speed", "\"\"\" joint_velocities = list() for joint_name in joint_names: joint_velocities.append(pybullet.getJointState( self.robot_model,", "the camera to be subscribed resolution - CameraResolution object, the", "of values corresponding to the angles in radians to be", "OpenCV \"\"\" try: assert self.active_camera is not None return self.active_camera.getFrame()", "0: return True return False except AssertionError: raise pybullet.error( \"Unauthorized", "the description of the virtual robot. 
For now, only URDF", "self.camera_dict[camera_id] self.active_camera.subscribe(resolution=resolution) except KeyError: print(\"This camera does not exist, use", "physicsClientId self.robot_model = pybullet.loadURDF( self.description_file, translation, quaternion, useFixedBase=False, globalScaling=1.0, physicsClientId=self.physics_client,", "be subscribed. Parameters: camera_id - The id of the camera", "= physicsClientId self.robot_model = pybullet.loadURDF( self.description_file, translation, quaternion, useFixedBase=False, globalScaling=1.0,", "any frame\") def getCameraResolution(self): \"\"\" Returns the resolution of the", "physicsClientId=self.physics_client) self.link_dict[joint_info[12]] = Link(joint_info) if joint_info[2] == pybullet.JOINT_PRISMATIC or\\ joint_info[2]", "camera to be subscribed resolution - CameraResolution object, the resolution", "if True at least one of the links is self", "robot model: \" + str(e)) for i in range(pybullet.getNumJoints( self.robot_model,", "boolean - True if the method ran correctly, False otherwise", "getRobotModel(self): \"\"\" Returns the pybullet model to which the module", "from qibullet.camera import * from qibullet.link import Link from qibullet.joint", "joint_info = pybullet.getJointInfo( self.robot_model, i, physicsClientId=self.physics_client) self.link_dict[joint_info[12].decode('utf-8')] =\\ Link(joint_info) if", "the joint's velocities \"\"\" joint_velocities = list() for joint_name in", "if a link is colliding with the rest of the", "pybullet model of the robot \"\"\" return self.robot_model def getPhysicsClientId(self):", "- List containing 3 elements, the translation [x, y, z]", "robot (possessing the module) is spawned \"\"\" return self.physics_client def", "The pybullet model of the robot \"\"\" return self.robot_model def", "camera_id, resolution=Camera.K_QVGA): \"\"\" Subscribe to the camera holding the camera", "the camera holding the camera id. 
WARNING: at the moment,", "not None return self.active_camera.getFrame() except AssertionError: raise pybullet.error(\"No active camera,", "names of the joints Returns: joint_positions - List of floats", "- The positions of the robot's base on the y", "self.joint_dict[joint_name].getMaxVelocity() *\\ percentage_speed pybullet.setJointMotorControl2( self.robot_model, self.joint_dict[joint_name].getIndex(), pybullet.POSITION_CONTROL, targetPosition=joint_value, maxVelocity=joint_speed, force=self.joint_dict[joint_name].getMaxEffort(),", "camera id. Parameters: camera_id - The id of the camera", "def getAnglesPosition(self, joint_names): \"\"\" Gets the position of the robot's", "camera, resolution unavailable\") def getCameraLink(self): \"\"\" Returns the link of", "If there is no active camera, a None is returned", "Returns: joint_velocities - List of floats containing the joint's velocities", "\"\"\" self.description_file = description_file self.physics_client = 0 self.active_camera = None", "no active camera, a None is returned \"\"\" return self.active_camera", "in meters \"\"\" position, quaternions = pybullet.getBasePositionAndOrientation( self.robot_model, physicsClientId=self.physics_client) theta", "used, otherwise the link cannot self collide Returns: self_colliding -", "beforehand, otherwise a pybullet error will be raised. Returns: frame", "def getAnglesVelocity(self, joint_names): \"\"\" Gets the velocity of the robot's", "Subscribe to the camera holding the camera id. WARNING: at", "if len(contact_tuple) != 0: return True return False except AssertionError:", "# PYTHON 2 Version joint_info = pybullet.getJointInfo( self.robot_model, i, physicsClientId=self.physics_client)", "Link(joint_info) if joint_info[2] == pybullet.JOINT_PRISMATIC or\\ joint_info[2] == pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1]]", "Returns a camera frame. 
Be advised that the subscribeCamera method", "def getCameraResolution(self): \"\"\" Returns the resolution of the active camera.", "x - The position of the robot's base on the", "flags=pybullet.URDF_USE_SELF_COLLISION | pybullet.URDF_USE_MATERIAL_COLORS_FROM_MTL) except pybullet.error as e: raise pybullet.error(\"Cannot load", "- The position of the robot's base on the x", "Link object describing the link to which the active camera", "List containing 4 elements, the quaternion [x, y, z, q]", "floats containing the joint's positions \"\"\" joint_positions = list() for", "id. WARNING: at the moment, only one camera can be", "- The file giving the description of the virtual robot.", "resolution - CameraResolution object, the resolution of the camera \"\"\"", "= pybullet.loadURDF( self.description_file, translation, quaternion, useFixedBase=False, globalScaling=1.0, physicsClientId=self.physics_client, flags=pybullet.URDF_USE_SELF_COLLISION |", "virtual robot. For now, only URDF is handled \"\"\" self.description_file", "rotation of the robot's base on the z axis in", "except KeyError: print(\"This camera does not exist, use a valid", "valid camera id\") def unsubscribeCamera(self, camera_id): \"\"\" Unsubscribe from a", "For now, only URDF is handled \"\"\" self.description_file = description_file", "exist, the method will raise a KeyError. Parameters: joint_names -", "def loadRobot(self, translation, quaternion, physicsClientId=0): \"\"\" Loads the robot into", "the simulated instance in which the module is loaded. Returns:", "called beforehand, otherwise a pybullet error will be raised. 
Returns:", "the resolution of the camera \"\"\" try: self.active_camera = self.camera_dict[camera_id]", "physicsClientId=self.physics_client) contact_tuple += pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model, linkIndexB=self.link_dict[name].getIndex(), physicsClientId=self.physics_client) if len(contact_tuple)", "that the subscribeCamera method needs to be called beforehand, otherwise", "==\\ len(joint_values) ==\\ len(percentage_speeds) assert all( speed >= 0.0 and", "self.physics_client = 0 self.active_camera = None self.camera_dict = dict() self.joint_dict", "- The id of the simulation in which the robot", "is returned \"\"\" return self.active_camera def getPosition(self): \"\"\" Gets the", "pass def getCameraFrame(self): \"\"\" Returns a camera frame. Be advised", "the robot's base on the z axis in meters \"\"\"", "[link_names] else: assert set(link_names).issubset(self.link_dict.keys()) names = list(link_names) for name in", "Parameters: translation - List containing 3 elements, the translation [x,", "joint_info[2] == pybullet.JOINT_PRISMATIC or\\ joint_info[2] == pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1].decode('utf-8')] =\\ Joint(joint_info)", "otherwise \"\"\" try: self.physics_client = physicsClientId self.robot_model = pybullet.loadURDF( self.description_file,", "\" + str(e)) for i in range(pybullet.getNumJoints( self.robot_model, physicsClientId=self.physics_client)): if", "be subscribed resolution - CameraResolution object, the resolution of the", "names of the joints Returns: joint_velocities - List of floats", "pybullet.URDF_USE_MATERIAL_COLORS_FROM_MTL) except pybullet.error as e: raise pybullet.error(\"Cannot load robot model:", "robot's base in the world frame. Returns: x - The", "a camera frame. 
Be advised that the subscribeCamera method needs", "of the links is self colliding \"\"\" try: if type(link_names)", "speed to be used for each joint, has to be", "self.robot_model, i, physicsClientId=self.physics_client) self.link_dict[joint_info[12]] = Link(joint_info) if joint_info[2] == pybullet.JOINT_PRISMATIC", "numpy array, directly exploitable from OpenCV \"\"\" try: assert self.active_camera", "list of string containing the names of the links to", "self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[1]) return joint_velocities def subscribeCamera(self, camera_id, resolution=Camera.K_QVGA): \"\"\" Subscribe", "i, physicsClientId=self.physics_client) self.link_dict[joint_info[12]] = Link(joint_info) if joint_info[2] == pybullet.JOINT_PRISMATIC or\\", "raise pybullet.error(\"Cannot load robot model: \" + str(e)) for i", "used for each joint, has to be strictly superior to", "unsubscribed assert self.active_camera is not None if self.active_camera.getCameraId() == camera_id:", "self.joint_dict = dict() self.link_dict = dict() def loadRobot(self, translation, quaternion,", "or\\ joint_info[2] == pybullet.JOINT_REVOLUTE: self.joint_dict[joint_info[1].decode('utf-8')] =\\ Joint(joint_info) else: # PYTHON", "to be applied percentage_speeds - Percentages of the max speed", "to the camera holding the camera id. WARNING: at the", "\"\"\" Constructor Parameters: description_file - The file giving the description", "joint doesn't exist, the method will raise a KeyError. Parameters:", "of the robot's joints in radians. If one of the", "raise a KeyError. 
Parameters: joint_names - List of string containing", "Be advised that the subscribeCamera method needs to be called", "y, z] of the robot in the WORLD frame quaternion", "is not None return self.active_camera.getFrame() except AssertionError: raise pybullet.error(\"No active", "loaded Returns: boolean - True if the method ran correctly,", "percentage_speeds - Percentages of the max speed to be used", "inferior or equal to 1 \"\"\" try: assert len(joint_names) ==\\", "\"\"\" try: # If no active camera is found, nothing", "Returns: active_camera - Camera (CameraRgb or CameraDepth) object, the active", "of the camera to be subscribed resolution - CameraResolution object,", "id. Parameters: camera_id - The id of the camera to", "one of the links is self colliding \"\"\" try: if", "list(link_names) for name in names: contact_tuple = pybullet.getContactPoints( bodyA=self.robot_model, bodyB=self.robot_model,", "physicsClientId - The id of the simulated instance in which", "\"\"\" return self.physics_client def setAngles(self, joint_names, joint_values, percentage_speeds): \"\"\" Set", "validity of the input parameters. Parameters: joint_names - List of", "in radians. If one of the joint doesn't exist, the", "method will raise a KeyError. 
Parameters: joint_names - List of", "axis, in meters y - The positions of the robot's", "= pybullet.getEulerFromQuaternion(quaternions)[2] return position[0], position[1], theta def isSelfColliding(self, link_names): \"\"\"", "in meters theta - The rotation of the robot's base", "Returns: x - The position of the robot's base on", "theta - The rotation of the robot's base on the", "str: assert link_names in self.link_dict.keys() names = [link_names] else: assert", "except AssertionError: raise pybullet.error( \"Unauthorized link checking for self collisions\")", "- Percentages of the max speed to be used for", "representing a virtual robot \"\"\" def __init__(self, description_file): \"\"\" Constructor", "beforehand, otherwise a pybullet error will be raised. Returns: resolution", "camera id. WARNING: at the moment, only one camera can", "e: raise pybullet.error(\"Cannot load robot model: \" + str(e)) for", "active camera is found, nothing is unsubscribed assert self.active_camera is", "True if the method ran correctly, False otherwise \"\"\" try:", "self.robot_model = pybullet.loadURDF( self.description_file, translation, quaternion, useFixedBase=False, globalScaling=1.0, physicsClientId=self.physics_client, flags=pybullet.URDF_USE_SELF_COLLISION", "of the simulated instance in which the module is loaded.", "return joint_positions def getAnglesVelocity(self, joint_names): \"\"\" Gets the velocity of", "unsubscribeCamera(self, camera_id): \"\"\" Unsubscribe from a camera, the one holding", "nothing is unsubscribed assert self.active_camera is not None if self.active_camera.getCameraId()", "except AssertionError: raise pybullet.error(\"No active camera, cannot retrieve any frame\")", "- a CameraResolution object describing the resolution of the active", "will be raised. 
Returns: resolution - a Link object describing", "the active camera is attached \"\"\" try: assert self.active_camera is", "axis in meters \"\"\" position, quaternions = pybullet.getBasePositionAndOrientation( self.robot_model, physicsClientId=self.physics_client)", "The id of the simulated instance in which the robot", "\"\"\" Unsubscribe from a camera, the one holding the camera", "positions of the robot's base on the y axis in", "string containing the names of the links to be checked", "the names of the joints Returns: joint_velocities - List of", "simulation, loads the joints and the links descriptions. The joints", "==\\ len(percentage_speeds) assert all( speed >= 0.0 and speed <=", "Parameters: camera_id - The id of the camera to be", "spawned \"\"\" return self.physics_client def setAngles(self, joint_names, joint_values, percentage_speeds): \"\"\"", "[x, y, z] of the robot in the WORLD frame", "Returns: robot_model - The pybullet model of the robot \"\"\"", "in joint_names: joint_velocities.append(pybullet.getJointState( self.robot_model, self.joint_dict[joint_name].getIndex(), physicsClientId=self.physics_client)[1]) return joint_velocities def subscribeCamera(self,", "can be subscribed. Parameters: camera_id - The id of the", "try: assert self.active_camera is not None return self.active_camera.getFrame() except AssertionError:", "containing the joint's velocities \"\"\" joint_velocities = list() for joint_name", "self.link_dict[joint_info[12].decode('utf-8')] =\\ Link(joint_info) if joint_info[2] == pybullet.JOINT_PRISMATIC or\\ joint_info[2] ==", "the joints and the links descriptions. The joints are set", "IS_VERSION_PYTHON_3 = sys.version_info[0] >= 3 class RobotVirtual: \"\"\" Mother class", "joint_names): \"\"\" Gets the position of the robot's joints in", "holding the camera id. WARNING: at the moment, only one", "Gets the velocity of the robot's joints in rad/s. If" ]